mirror of https://github.com/ynput/ayon-core.git
synced 2026-01-01 16:34:53 +01:00

Create write, Publish write, Publish prerendered.frames, Load sequence. All is working

This commit is contained in:
  parent b03ede44f3
  commit 27618c8d55

10 changed files with 247 additions and 156 deletions
@@ -71,6 +71,7 @@ class AppAction(object):
             ),
             self._launch
         )
+        self.log.info("Application '{}' - Registered successfully".format(self.label))

     def _discover(self, event):
         args = self._translate_event(
@@ -113,8 +113,7 @@ def install():
     # Disable all families except for the ones we explicitly want to see
     family_states = [
         "render",
-        "still"
         "write",
         "lifeGroup",
         "backdrop",
         "imagesequence",
@@ -241,7 +241,7 @@ def get_avalon_knob_data(node):
    import toml
    try:
        data = toml.loads(node['avalon'].value())
-   except:
+   except Exception:
        return None
    return data
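For orientation, the round-trip this helper performs, sketched with toml alone; the knob access (node['avalon']) is Nuke-only, and the dict contents here are hypothetical:

    import toml

    # What get_avalon_knob_data() expects the "avalon" knob to hold:
    # a TOML document encoding instance metadata.
    data = {"family": "write", "subset": "renderMain"}
    encoded = toml.dumps(data)       # stored via node['avalon'].setValue(...)
    decoded = toml.loads(encoded)    # read back via node['avalon'].value()
    assert decoded == data

    # A malformed knob value raises toml.TomlDecodeError, which the
    # narrowed `except Exception` above still catches; unlike a bare
    # `except:`, it no longer swallows KeyboardInterrupt or SystemExit.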
@@ -20,7 +20,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        'rig': 'rig',
        'setdress': 'setdress',
        'pointcache': 'cache',
-       'review': 'mov'}
+       'review': 'mov',
+       'write': 'img'}

    def process(self, instance):
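A sketch of how a family-to-asset-type mapping like this is typically consulted when the plugin builds its Ftrack component; the lookup helper and the 'upload' default are hypothetical, not part of the commit:

    family_mapping = {'rig': 'rig',
                      'setdress': 'setdress',
                      'pointcache': 'cache',
                      'review': 'mov',
                      'write': 'img'}

    def asset_type_for(family, default='upload'):
        # Unmapped families fall back to a generic asset type.
        return family_mapping.get(family, default)

    assert asset_type_for('write') == 'img'
    assert asset_type_for('unknown') == 'upload'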
@@ -1,14 +1,77 @@
-import nuke
 import os
 import contextlib

 from avalon import api
 import avalon.io as io

+
+import nuke
+
+from pype.api import Logger
+log = Logger.getLogger(__name__, "nuke")
+
+
+@contextlib.contextmanager
+def preserve_trim(node):
+    """Preserve the relative trim of the Loader tool.
+
+    This tries to preserve the loader's trim (trim in and trim out) after
+    the context by reapplying the "amount" it trims on the clip's length at
+    start and end.
+
+    """
+    # working script frame range
+    script_start = nuke.root()["first_frame"].value()
+
+    start_at_frame = None
+    offset_frame = None
+    if node['frame_mode'].value() == "start at":
+        start_at_frame = node['frame'].value()
+    if node['frame_mode'].value() == "offset":
+        offset_frame = node['frame'].value()
+
+    try:
+        yield
+    finally:
+        if start_at_frame:
+            node['frame_mode'].setValue("start at")
+            node['frame'].setValue(str(script_start))
+            log.info("start frame of reader was set to "
+                     "{}".format(script_start))
+
+        if offset_frame:
+            node['frame_mode'].setValue("offset")
+            node['frame'].setValue(str(script_start + offset_frame))
+            log.info("start frame of reader was set to "
+                     "{}".format(script_start))
+
+
+def loader_shift(node, frame, relative=True):
+    """Shift global in time by frame amount, preserving duration.
+
+    This moves the loader by `frame` frames while preserving the global
+    duration. When relative is False it will shift the global in to the
+    start frame.
+
+    Args:
+        node (nuke.Node): The Read node to shift.
+        frame (int): The amount of frames to move.
+        relative (bool): When True the shift is relative, else the shift
+            will change the global in to frame.
+
+    Returns:
+        int: The resulting relative frame change (how much it moved)
+
+    """
+    # working script frame range
+    script_start = nuke.root()["first_frame"].value()
+
+    if relative:
+        node['frame_mode'].setValue("start at")
+        node['frame'].setValue(str(script_start))
+
+    return int(script_start)
+
+
 class LoadSequence(api.Loader):
     """Load image sequence into Nuke"""
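A sketch of how the two helpers compose once a Read node exists; the node name and file path are hypothetical, and this only runs inside a Nuke session:

    import nuke

    r = nuke.createNode("Read", "name Read_example")

    # Swap the file path while keeping the node's "start at"/"offset"
    # trim; preserve_trim re-applies the frame mode on exit.
    with preserve_trim(r):
        r["file"].setValue("/path/to/new_version.%04d.exr")

    # Re-pin the node's global in to the script start frame.
    loader_shift(r, frame=1001, relative=True)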
@@ -21,122 +84,132 @@ class LoadSequence(api.Loader):
     color = "orange"

     def load(self, context, name, namespace, data):
         from avalon.nuke import (
             containerise,
             ls_img_sequence,
             viewer_update_and_undo_stop
         )
-        for k, v in context.items():
-            log.info("key: `{}`, value: {}\n".format(k, v))
-
-        log.info("context: {}\n".format(context["representation"]))
-        log.info("name: {}\n".format(name))
-        log.info("namespace: {}\n".format(namespace))
-        log.info("data: {}\n".format(data))
-        return
-        # # Fallback to asset name when namespace is None
-        # if namespace is None:
-        #     namespace = context['asset']['name']
-        #
-        # # Use the first file for now
-        # # TODO: fix path fname
-        # file = ls_img_sequence(os.path.dirname(self.fname), one=True)
-        #
-        # # Create the Loader with the filename path set
-        # with viewer_update_and_undo_stop():
-        #     # TODO: it might be universal read to img/geo/camera
-        #     r = nuke.createNode(
-        #         "Read",
-        #         "name {}".format(self.name))  # TODO: does self.name exist?
-        #     r["file"].setValue(file['path'])
-        #     if len(file['frames']) is 1:
-        #         first = file['frames'][0][0]
-        #         last = file['frames'][0][1]
-        #         r["originfirst"].setValue(first)
-        #         r["first"].setValue(first)
-        #         r["originlast"].setValue(last)
-        #         r["last"].setValue(last)
-        #     else:
-        #         first = file['frames'][0][0]
-        #         last = file['frames'][:-1][1]
-        #         r["originfirst"].setValue(first)
-        #         r["first"].setValue(first)
-        #         r["originlast"].setValue(last)
-        #         r["last"].setValue(last)
-        #         log.warning("Missing frames in image sequence")
-        #
-        #     # Set global in point to start frame (if in version.data)
-        #     start = context["version"]["data"].get("startFrame", None)
-        #     if start is not None:
-        #         loader_shift(r, start, relative=False)
-        #
-        #     containerise(r,
-        #                  name=name,
-        #                  namespace=namespace,
-        #                  context=context,
-        #                  loader=self.__class__.__name__)
-
-    # def switch(self, container, representation):
-    #     self.update(container, representation)
-    #
-    # def update(self, container, representation):
-    #     """Update the Loader's path
-    #
-    #     Fusion automatically tries to reset some variables when changing
-    #     the loader's path to a new file. These automatic changes are to its
-    #     inputs:
-    #
-    #     """
-    #
-    #     from avalon.nuke import (
-    #         viewer_update_and_undo_stop,
-    #         ls_img_sequence,
-    #         update_container
-    #     )
-    #     log.info("this i can see")
-    #     node = container["_tool"]
-    #     # TODO: prepare also for other readers img/geo/camera
-    #     assert node.Class() == "Reader", "Must be Reader"
-    #
-    #     root = api.get_representation_path(representation)
-    #     file = ls_img_sequence(os.path.dirname(root), one=True)
-    #
-    #     # Get start frame from version data
-    #     version = io.find_one({"type": "version",
-    #                            "_id": representation["parent"]})
-    #     start = version["data"].get("startFrame")
-    #     if start is None:
-    #         log.warning("Missing start frame for updated version"
-    #                     "assuming starts at frame 0 for: "
-    #                     "{} ({})".format(node['name'].value(), representation))
-    #         start = 0
-    #
-    #     with viewer_update_and_undo_stop():
-    #
-    #         # Update the loader's path whilst preserving some values
-    #         with preserve_trim(node):
-    #             with preserve_inputs(node,
-    #                                  knobs=["file",
-    #                                         "first",
-    #                                         "last",
-    #                                         "originfirst",
-    #                                         "originlast",
-    #                                         "frame_mode",
-    #                                         "frame"]):
-    #                 node["file"] = file["path"]
-    #
-    #         # Set the global in to the start frame of the sequence
-    #         global_in_changed = loader_shift(node, start, relative=False)
-    #         if global_in_changed:
-    #             # Log this change to the user
-    #             log.debug("Changed '{}' global in:"
-    #                       " {:d}".format(node['name'].value(), start))
-    #
-    #         # Update the imprinted representation
-    #         update_container(
-    #             node,
-    #             {"representation": str(representation["_id"])}
-    #         )
-    #
-    # def remove(self, container):
-    #
-    #     from avalon.nuke import viewer_update_and_undo_stop
-    #
-    #     node = container["_tool"]
-    #     assert node.Class() == "Reader", "Must be Reader"
-    #
-    #     with viewer_update_and_undo_stop():
-    #         nuke.delete(node)
+        # Fallback to asset name when namespace is None
+        if namespace is None:
+            namespace = context['asset']['name']
+
+        # Use the first file for now
+        # TODO: fix path fname
+        file = ls_img_sequence(os.path.dirname(self.fname), one=True)
+        log.info("file: {}\n".format(file))
+
+        read_name = "Read_" + context["representation"]["context"]["subset"]
+        # Create the Loader with the filename path set
+        with viewer_update_and_undo_stop():
+            # TODO: it might be universal read to img/geo/camera
+            r = nuke.createNode(
+                "Read",
+                "name {}".format(read_name))
+            r["file"].setValue(file['path'])
+            if len(file['frames']) == 1:
+                first = file['frames'][0][0]
+                last = file['frames'][0][1]
+                r["origfirst"].setValue(first)
+                r["first"].setValue(first)
+                r["origlast"].setValue(last)
+                r["last"].setValue(last)
+            else:
+                first = file['frames'][0][0]
+                last = file['frames'][-1][1]
+                r["origfirst"].setValue(first)
+                r["first"].setValue(first)
+                r["origlast"].setValue(last)
+                r["last"].setValue(last)
+                log.warning("Missing frames in image sequence")
+
+            # Set colorspace defined in version data
+            colorspace = context["version"]["data"].get("colorspace", None)
+            if colorspace is not None:
+                r["colorspace"].setValue(str(colorspace))
+
+            # Set global in point to start frame (if in version.data)
+            start = context["version"]["data"].get("startFrame", None)
+            if start is not None:
+                loader_shift(r, start, relative=True)
+
+            # add additional metadata from the version to imprint to Avalon knob
+            add_keys = ["startFrame", "endFrame", "handles",
+                        "source", "colorspace", "author", "fps"]
+
+            data_imprint = {}
+            for k in add_keys:
+                data_imprint.update({k: context["version"]['data'][k]})
+
+            containerise(r,
+                         name=name,
+                         namespace=namespace,
+                         context=context,
+                         loader=self.__class__.__name__,
+                         data=data_imprint)
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def update(self, container, representation):
+        """Update the Loader's path
+
+        Nuke automatically tries to reset some variables when changing
+        the loader's path to a new file. These automatic changes are to
+        its inputs.
+
+        """
+
+        from avalon.nuke import (
+            viewer_update_and_undo_stop,
+            ls_img_sequence,
+            update_container
+        )
+        log.info("this i can see")
+        node = container["_tool"]
+        # TODO: prepare also for other readers img/geo/camera
+        assert node.Class() == "Reader", "Must be Reader"
+
+        root = api.get_representation_path(representation)
+        file = ls_img_sequence(os.path.dirname(root), one=True)
+
+        # Get start frame from version data
+        version = io.find_one({"type": "version",
+                               "_id": representation["parent"]})
+        start = version["data"].get("startFrame")
+        if start is None:
+            log.warning("Missing start frame for updated version, "
+                        "assuming starts at frame 0 for: "
+                        "{} ({})".format(node['name'].value(), representation))
+            start = 0
+
+        with viewer_update_and_undo_stop():
+
+            # Update the loader's path whilst preserving some values
+            with preserve_trim(node):
+                node["file"].setValue(file["path"])
+
+            # Set the global in to the start frame of the sequence
+            global_in_changed = loader_shift(node, start, relative=False)
+            if global_in_changed:
+                # Log this change to the user
+                log.debug("Changed '{}' global in:"
+                          " {:d}".format(node['name'].value(), start))
+
+            # Update the imprinted representation
+            update_container(
+                node,
+                {"representation": str(representation["_id"])}
+            )
+
+    def remove(self, container):
+
+        from avalon.nuke import viewer_update_and_undo_stop
+
+        node = container["_tool"]
+        assert node.Class() == "Reader", "Must be Reader"
+
+        with viewer_update_and_undo_stop():
+            nuke.delete(node)
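For reference, the shape of data load() and update() consume from ls_img_sequence (the real helper lives in avalon.nuke; the values here are hypothetical):

    # One image sequence on disk, described as a frame-token path plus
    # (first, last) ranges; more than one range means the sequence has gaps.
    file = {
        "path": "/projects/show/sh010/publish/renderMain.%04d.exr",
        "frames": [(1001, 1100)],
    }

    first = file['frames'][0][0]    # 1001
    last = file['frames'][-1][1]    # 1100, also correct when gaps exist
    assert (first, last) == (1001, 1100)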
@@ -24,24 +24,29 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             except Exception:
                 continue

+            try:
+                publish = node.knob("publish").value()
+            except Exception:
+                continue
+
             # get data from avalon knob
             avalon_knob_data = get_avalon_knob_data(node)
             if not avalon_knob_data:
                 continue
-            subset = avalon_knob_data["subset"]
+
+            subset = avalon_knob_data.get("subset", None) or node["name"].value()

             # Create instance
             instance = context.create_instance(subset)
             instance.add(node)

             instance.data.update({
-                "subset": subset,
                 "asset": os.environ["AVALON_ASSET"],
                 "label": node.name(),
                 "name": node.name(),
+                "subset": subset,
+                "families": [avalon_knob_data["families"]],
+                "family": avalon_knob_data["family"],
-                "publish": node.knob("publish").value()
+                "avalonKnob": avalon_knob_data,
+                "publish": publish
             })
             self.log.info("collected instance: {}".format(instance.data))
             instances.append(instance)
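The new subset line is the usual `or`-fallback idiom: prefer the knob value, fall back to the node name when the knob is missing or empty. A toy check with hypothetical values:

    avalon_knob_data = {"subset": ""}     # knob present but empty
    node_name = "Write1"

    subset = avalon_knob_data.get("subset", None) or node_name
    assert subset == "Write1"             # empty string falls through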
@@ -3,6 +3,8 @@ import os
 import nuke
 import pyblish.api
+import logging
 from avalon import io, api

+log = logging.getLogger(__name__)

@@ -15,6 +17,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
     hosts = ["nuke", "nukeassist"]

     def process(self, context):
+        asset_data = io.find_one({"type": "asset",
+                                  "name": api.Session["AVALON_ASSET"]})
+        self.log.debug("asset_data: {}".format(asset_data["data"]))
         for instance in context.data["instances"]:
             self.log.debug("checking instance: {}".format(instance))
             node = instance[0]
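A sketch of the asset document shape this query assumes, with per-asset settings under "data"; the values are hypothetical:

    asset_data = {
        "type": "asset",
        "name": "sh010",
        "data": {"handles": 10, "fps": 25},
    }

    # Consumed further down in this plugin when filling instance.data:
    handles = int(asset_data["data"].get("handles", 0))
    assert handles == 10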
@@ -63,9 +68,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
         else:
             # dealing with local/farm rendering
             if node["render_farm"].value():
-                families = "{}.farm".format(instance.data["families"][0])
+                families = "{}.farm".format(instance.data["avalonKnob"]["families"][0])
             else:
-                families = "{}.local".format(instance.data["families"][0])
+                families = "{}.local".format(instance.data["avalonKnob"]["families"][0])

         self.log.debug("checking for error: {}".format(label))
         instance.data.update({
@@ -73,12 +78,16 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
             "outputDir": output_dir,
             "ext": ext,
             "label": label,
+            "family": instance.data["avalonKnob"]["family"],
             "families": [families],
-            "firstFrame": first_frame,
-            "lastFrame": last_frame,
+            "startFrame": first_frame,
+            "endFrame": last_frame,
             "outputType": output_type,
             "stagingDir": output_dir,
+
+            "colorspace": node["colorspace"].value(),
+            "handles": int(asset_data["data"].get("handles", 0)),
+            "step": 1,
+            "fps": int(nuke.root()['fps'].value())
         })

         self.log.debug("instance.data: {}".format(instance.data))
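The firstFrame/lastFrame to startFrame/endFrame rename here is what the NukeRenderLocal and ValidateCollection hunks below pick up; a toy check of the inclusive range arithmetic, with hypothetical values:

    instance_data = {"startFrame": 1001, "endFrame": 1100}

    first_frame = instance_data.get("startFrame", None)
    last_frame = instance_data.get("endFrame", None)
    frame_length = last_frame - first_frame + 1    # inclusive of both ends
    assert frame_length == 100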
@@ -1,6 +1,7 @@
 import os
 import logging
 import shutil
+import clique

 import errno
 import pyblish.api
@@ -110,9 +111,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
                              locations=[LOCATION],
                              data=version_data)

+        self.log.debug("version: {}".format(version))
         self.log.debug("Creating version ...")
         version_id = io.insert_one(version).inserted_id
+        self.log.debug("version_id: {}".format(version_id))

         # Write to disk
         #  _
         # | |
@@ -130,11 +133,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
         # "asset": ASSET,
         # "subset": subset["name"],
         # "version": version["name"]}
-        hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents']
+        hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
         if hierarchy:
             # hierarchy = os.path.sep.join(hierarchy)
             hierarchy = os.path.join(*hierarchy)

+        self.log.debug("hierarchy: {}".format(hierarchy))
         template_data = {"root": root,
                          "project": {"name": PROJECT,
                                      "code": "prjX"},
@@ -145,7 +148,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
                          "VERSION": version["name"],
                          "hierarchy": hierarchy}

-        template_publish = project["config"]["template"]["publish"]
+        # template_publish = project["config"]["template"]["publish"]
         anatomy = instance.context.data['anatomy']

         # Find the representations to transfer amongst the files
@@ -153,7 +156,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
         representations = []
-
         for files in instance.data["files"]:

             # Collection
             #  _______
             # |______|\
@@ -206,7 +208,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
             anatomy_filled = anatomy.format(template_data)
             dst = anatomy_filled.publish.path
-

             # if instance.data.get('transfer', True):
             #     dst = src
             #     instance.data["transfers"].append([src, dst])
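A toy stand-in for the anatomy fill used above: template_data drives a path template for the publish location. The real Anatomy object comes from instance.context.data['anatomy']; only the shape is mirrored here, and the template string is invented:

    template = ("{root}/{project[name]}/{hierarchy}/{asset}/publish/"
                "{subset}/v{version:03d}")
    template_data = {"root": "/mnt/projects",
                     "project": {"name": "prjX"},
                     "hierarchy": "seq01/sh010",
                     "asset": "sh010",
                     "subset": "renderMain",
                     "version": 5}

    dst = template.format(**template_data)
    # -> /mnt/projects/prjX/seq01/sh010/publish/renderMain/v005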
@@ -222,17 +223,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
             # Imprint shortcut to context
             # for performance reasons.
             "context": {
                 "root": root,
                 "project": PROJECT,
                 "projectcode": "prjX",
                 'task': api.Session["AVALON_TASK"],
                 "silo": asset['silo'],
                 "asset": ASSET,
                 "family": instance.data['family'],
                 "subset": subset["name"],
                 "version": version["name"],
                 "hierarchy": hierarchy,
                 "representation": ext[1:]
             }
         }
         representations.append(representation)
@@ -353,9 +354,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
                         "comment": context.data.get("comment")}

         # Include optional data if present in instance.data
-        optionals = ["startFrame", "endFrame", "step", "handles"]
+        optionals = ["startFrame", "endFrame", "step",
+                     "handles", "colorspace", "fps", "outputDir"]
+
         for key in optionals:
             if key in instance.data:
-                version_data[key] = instance.data[key]
+                version_data[key] = instance.data.get(key, None)

         return version_data
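The optional-data merge as rewritten, with hypothetical values:

    version_data = {"comment": "initial publish"}
    optionals = ["startFrame", "endFrame", "step",
                 "handles", "colorspace", "fps", "outputDir"]
    instance_data = {"startFrame": 1001, "endFrame": 1100, "fps": 25}

    for key in optionals:
        if key in instance_data:
            # The membership test already guards the lookup, so the
            # .get(key, None) default can never actually be returned;
            # it is a belt-and-braces choice over instance_data[key].
            version_data[key] = instance_data.get(key, None)

    assert "colorspace" not in version_data   # absent keys stay absent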
@@ -29,8 +29,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):

         self.log.debug("instance collected: {}".format(instance.data))

-        first_frame = instance.data.get("firstFrame", None)
-        last_frame = instance.data.get("lastFrame", None)
+        first_frame = instance.data.get("startFrame", None)
+        last_frame = instance.data.get("endFrame", None)
         node_subset_name = instance.data.get("name", None)

         self.log.info("Starting render")
@@ -35,8 +35,8 @@ class ValidateCollection(pyblish.api.InstancePlugin):
         collections, remainder = clique.assemble(*instance.data['files'])
         self.log.info('collections: {}'.format(str(collections)))

-        frame_length = instance.data["lastFrame"] \
-            - instance.data["firstFrame"] + 1
+        frame_length = instance.data["endFrame"] \
+            - instance.data["startFrame"] + 1

         if frame_length != 1:
             assert len(collections) == 1, self.log.info(
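clique, imported in the IntegrateFrames hunk above, groups filenames into frame collections; the validator compares that grouping against the collected frame range. A minimal sketch with hypothetical file names:

    import clique

    # One contiguous image sequence should assemble into exactly one
    # collection with no remainder.
    files = ["render.1001.exr", "render.1002.exr", "render.1003.exr"]
    collections, remainder = clique.assemble(files)

    assert len(collections) == 1 and not remainder
    assert list(collections[0].indexes) == [1001, 1002, 1003]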