Add publish of already rendered frames; make write instance work with publish, render and farm attributes.

Milan Kolar 2018-12-01 01:40:37 +01:00
parent 4ae098e4d2
commit 71b51d1fa9
5 changed files with 438 additions and 10 deletions

View file

@@ -4,7 +4,6 @@ import nuke
 import pyblish.api
 import clique

-@pyblish.api.log
 class CollectNukeInstances(pyblish.api.ContextPlugin):
     """Collect all write nodes."""
@@ -57,11 +56,16 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             instance.add(node)

             # Adding/Checking publish and render target attribute
-            if "render_local" not in node.knobs():
-                knob = nuke.Boolean_Knob("render_local", "Local rendering")
+            if "farm" not in node.knobs():
+                knob = nuke.Boolean_Knob("farm", "Farm Rendering")
                 knob.setValue(False)
                 node.addKnob(knob)

+            # Adding/Checking publish and render target attribute
+            if "render" not in node.knobs():
+                knob = nuke.Boolean_Knob("render", "Render")
+                knob.setValue(False)
+                node.addKnob(knob)

             instance.data.update({
                 "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
@@ -70,15 +74,21 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                 "outputDir": os.path.dirname(nuke.filename(node)),
                 "ext": ext,  # todo: should be redundant
                 "label": label,
-                "families": ["render.local"],
-                "family": "write",
-                "publish": node.knob("publish"),
+                "family": "render",
+                "publish": node.knob("publish").value(),
                 "collection": collection,
-                "first_frame": first_frame,
-                "last_frame": last_frame,
+                "startFrame": first_frame,
+                "endFrame": last_frame,
                 "output_type": output_type
             })

+            if node.knob('render').value():
+                instance.data["families"] = ["render.local"]
+                if node.knob('farm').value():
+                    instance.data["families"] = ["render.farm"]
+            else:
+                instance.data["families"] = ["prerendered.frames"]
+
         # Sort/grouped by family (preserving local index)
         context[:] = sorted(context, key=self.sort_by_family)
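
For context, the knob-driven family selection introduced above can be tried outside of a publish run. A minimal sketch for the Nuke script editor; the resolve_families helper is hypothetical and only mirrors the collector's precedence (farm overrides render, and neither checked means the frames on disk are published as-is):

import nuke

def resolve_families(node):
    # Hypothetical helper mirroring the collector logic above.
    if node.knob('render').value():
        if node.knob('farm').value():
            return ["render.farm"]
        return ["render.local"]
    return ["prerendered.frames"]

node = nuke.selectedNode()  # assumes a Write node is selected
for name, label in (("farm", "Farm Rendering"), ("render", "Render")):
    if name not in node.knobs():
        knob = nuke.Boolean_Knob(name, label)
        knob.setValue(False)
        node.addKnob(knob)

print(resolve_families(node))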

View file

@@ -0,0 +1,23 @@
import pyblish.api
import os


class CollectFrames(pyblish.api.InstancePlugin):
    """Collect already rendered frames from the instance output directory"""

    order = pyblish.api.CollectorOrder + 0.499
    label = "Collect data into prerendered frames"
    hosts = ["nuke"]
    families = ['prerendered.frames']

    def process(self, instance):
        collected_frames = os.listdir(instance.data['outputDir'])

        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].append(collected_frames)
        instance.data['stagingDir'] = instance.data['outputDir']
        instance.data['transfer'] = False

        self.log.info('collected frames: {}'.format(collected_frames))
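
Note that the plugin appends the whole directory listing as one list, so instance.data["files"] ends up as a list of lists; both the integrator's isinstance(files, list) branch and the validator's clique.assemble(*...) call below rely on that shape. A standalone illustration with made-up file names:

import clique

# Stand-in for os.listdir(instance.data['outputDir'])
collected_frames = ["beauty.1001.exr", "beauty.1002.exr", "beauty.1003.exr"]

files = []                       # instance.data["files"]
files.append(collected_frames)   # a list inside a list

# The validator later unpacks the outer list for clique:
collections, remainder = clique.assemble(*files)
print(collections[0])            # beauty.%04d.exr [1001-1003]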

View file

@@ -0,0 +1,361 @@
import os
import logging
import shutil
import errno

import pyblish.api
from avalon import api, io

log = logging.getLogger(__name__)


class IntegrateFrames(pyblish.api.InstancePlugin):
    """Resolve any dependency issues

    This plug-in resolves any paths which, if not updated, might break
    the published file.

    The order of families is important: when working with lookdev you want to
    first publish the texture, update the texture paths in the nodes and then
    publish the shading network. The same goes for file-dependent assets.
    """

    label = "Integrate Frames"
    order = pyblish.api.IntegratorOrder
    families = ["prerendered.frames"]

    def process(self, instance):
        self.register(instance)

        self.log.info("Integrating Asset in to the database ...")
        # self.integrate(instance)
    def register(self, instance):
        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context

        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        # extra check if stagingDir actually exists and is available
        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one({"type": "project"},
                              projection={"config.template.publish": True})
        asset = io.find_one({"type": "asset",
                             "name": ASSET,
                             "parent": project["_id"]})

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one({"type": "version",
                                      "parent": subset["_id"]},
                                     {"name": True},
                                     sort=[("name", -1)])

        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.info("Verifying version from assumed destination")

        assumed_data = instance.data["assumedTemplateData"]
        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match "
                                 "next version in database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)
        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        # template_data = {"root": root,
        #                  "project": PROJECT,
        #                  "silo": asset['silo'],
        #                  "asset": ASSET,
        #                  "subset": subset["name"],
        #                  "version": version["name"]}

        hierarchy = io.find_one({"type": 'asset',
                                 "name": ASSET})['data']['parents']
        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*hierarchy)

        template_data = {"root": root,
                         "project": {"name": PROJECT,
                                     "code": "prjX"},
                         "silo": asset['silo'],
                         "asset": ASSET,
                         "family": instance.data['family'],
                         "subset": subset["name"],
                         "VERSION": version["name"],
                         "hierarchy": hierarchy}

        template_publish = project["config"]["template"]["publish"]
        anatomy = instance.context.data['anatomy']
        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []
        for files in instance.data["files"]:

            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |      ||
            # |      ||
            # |      ||
            # |______||
            #
            if isinstance(files, list):
                collection = files
                # Assert that each member has identical suffix
                _, ext = os.path.splitext(collection[0])
                assert all(ext == os.path.splitext(name)[1]
                           for name in collection), (
                    "Files had varying suffixes, this is a bug"
                )

                assert not any(os.path.isabs(name) for name in collection)

                template_data["representation"] = ext[1:]

                for fname in collection:
                    src = os.path.join(stagingdir, fname)
                    anatomy_filled = anatomy.format(template_data)
                    dst = anatomy_filled.publish.path
                    # if instance.data.get('transfer', True):
                    #     instance.data["transfers"].append([src, dst])

            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                fname = files

                assert not os.path.isabs(fname), (
                    "Given file name is a full path"
                )

                _, ext = os.path.splitext(fname)

                template_data["representation"] = ext[1:]

                src = os.path.join(stagingdir, fname)
                anatomy_filled = anatomy.format(template_data)
                dst = anatomy_filled.publish.path
                # if instance.data.get('transfer', True):
                #     dst = src
                #     instance.data["transfers"].append([src, dst])

            representation = {
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
                "data": {'path': src},
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "root": root,
                    "project": PROJECT,
                    "projectcode": "prjX",
                    'task': api.Session["AVALON_TASK"],
                    "silo": asset['silo'],
                    "asset": ASSET,
                    "family": instance.data['family'],
                    "subset": subset["name"],
                    "version": version["name"],
                    "hierarchy": hierarchy,
                    "representation": ext[1:]
                }
            }
            representations.append(representation)

        self.log.info("Registering {} items".format(len(representations)))
        io.insert_many(representations)
    def integrate(self, instance):
        """Move the files

        Through `instance.data["transfers"]`

        Args:
            instance: the instance to integrate
        """
        transfers = instance.data["transfers"]

        for src, dest in transfers:
            self.log.info("Copying file .. {} -> {}".format(src, dest))
            self.copy_file(src, dest)

    def copy_file(self, src, dst):
        """Copy the given source file to the destination

        Arguments:
            src (str): the source file which needs to be copied
            dst (str): the destination of the source file

        Returns:
            None
        """
        dirname = os.path.dirname(dst)
        try:
            os.makedirs(dirname)
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                raise

        shutil.copy(src, dst)
    def get_subset(self, asset, instance):
        subset = io.find_one({"type": "subset",
                              "parent": asset["_id"],
                              "name": instance.data["subset"]})

        if subset is None:
            subset_name = instance.data["subset"]
            self.log.info("Subset '%s' not found, creating.." % subset_name)

            _id = io.insert_one({
                "schema": "pype:subset-2.0",
                "type": "subset",
                "name": subset_name,
                "data": {},
                "parent": asset["_id"]
            }).inserted_id

            subset = io.find_one({"_id": _id})

        return subset

    def create_version(self, subset, version_number, locations, data=None):
        """Build the version document to insert for this publish

        Args:
            subset (dict): the registered subset of the asset
            version_number (int): the version number
            locations (list): the currently registered locations

        Returns:
            dict: collection of data to create a version
        """
        # Imprint currently registered location
        version_locations = [location for location in locations
                             if location is not None]

        return {"schema": "pype:version-2.0",
                "type": "version",
                "parent": subset["_id"],
                "name": version_number,
                "locations": version_locations,
                "data": data}
    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """
        families = []
        current_families = instance.data.get("families", list())
        instance_family = instance.data.get("family", None)

        if instance_family is not None:
            families.append(instance_family)
        families += current_families

        # create relative source path for DB
        relative_path = os.path.relpath(context.data["currentFile"],
                                        api.registered_root())
        source = os.path.join("{root}", relative_path).replace("\\", "/")

        version_data = {"families": families,
                        "time": context.data["time"],
                        "author": context.data["user"],
                        "source": source,
                        "comment": context.data.get("comment")}

        # Include optional data if present in the instance
        optionals = ["startFrame", "endFrame", "step", "handles"]
        for key in optionals:
            if key in instance.data:
                version_data[key] = instance.data[key]

        return version_data
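
The destination path in register() comes from the project anatomy object, which this commit only consumes. As a rough sketch of what that template fill-in amounts to, here is the same idea with plain str.format (the template string and values are purely illustrative, not the project's real anatomy):

import os

template_publish = ("{root}/{project}/{hierarchy}/{asset}/publish/"
                    "{family}/{subset}/v{version:03d}/"
                    "{subset}_v{version:03d}.{representation}")

template_data = {"root": "/projects",
                 "project": "prjX",
                 "hierarchy": os.path.join("seq01", "shot010"),
                 "asset": "shot010",
                 "family": "render",
                 "subset": "renderMain",
                 "version": 1,
                 "representation": "exr"}

print(template_publish.format(**template_data))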

View file

@@ -34,8 +34,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
         node_subset_name = instance.data.get("subset", None)

         self.log.info("Starting render")
-        self.log.info("Start frame: {}".format(first_frame))
-        self.log.info("End frame: {}".format(last_frame))
+        self.log.info("Start frame: {}".format(startFrame))
+        self.log.info("End frame: {}".format(endFrame))

         # Render frames
         nuke.execute(
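
For reference, the local render that follows these log lines comes down to nuke.execute over the instance frame range. A minimal sketch (node name and frame numbers are examples only):

import nuke

node = nuke.toNode("Write1")   # example Write node
first, last = 1001, 1010       # example frame range
nuke.execute(node.name(), int(first), int(last))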

View file

@@ -0,0 +1,34 @@
import pyblish.api
import pype.api
import clique
import os
import glob


class ValidateCollections(pyblish.api.InstancePlugin):
    """Validate the collected prerendered frames.

    This validates:
        - The files in the output directory assemble into exactly one
          collection.
        - The collection has no missing frames.
    """

    order = pype.api.ValidateContentsOrder
    label = "Validate Collections"
    families = ['prerendered.frames']

    def process(self, instance):
        collections, remainder = clique.assemble(*instance.data['files'])
        self.log.info('collections: {}'.format(collections))

        assert len(collections) == 1, (
            "There are multiple collections in the folder")

        collection_instance = instance.data.get('collection', None)

        assert collections[0].is_contiguous(), (
            "Some frames appear to be missing")
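
is_contiguous() is what actually catches dropped frames here. A quick illustration with a deliberately missing frame (file names invented):

import clique

frames = ["beauty.1001.exr", "beauty.1002.exr", "beauty.1004.exr"]
collections, remainder = clique.assemble(frames)

collection = collections[0]
print(collection.is_contiguous())          # False, frame 1003 is missing
print(sorted(collection.holes().indexes))  # [1003]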