Merge branch 'develop' into feature/PYPE-71-event_server

This commit is contained in:
Jakub Trllo 2018-12-06 13:00:17 +01:00
commit 919cdf7178
20 changed files with 531 additions and 214 deletions

View file

@@ -72,6 +72,7 @@ class AppAction(object):
),
self._launch
)
self.log.info("Application '{}' - Registered successfully".format(self.label))
self.log.info("Application '{}' - Registered successfully".format(self.label))

View file

@@ -3,7 +3,7 @@ import sys
from avalon import api as avalon
from pyblish import api as pyblish
from .. import api as pype
from .. import api
from pype.nuke import menu
@@ -15,12 +15,12 @@ import nuke
# removing logger handler created in avalon_core
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
for handler in api.Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
pype.Logger.logging.root.removeHandler(handler)
api.Logger.logging.root.removeHandler(handler)
log = pype.Logger.getLogger(__name__, "nuke")
log = api.Logger.getLogger(__name__, "nuke")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
@@ -37,7 +37,7 @@ self = sys.modules[__name__]
self.nLogger = None
class NukeHandler(pype.Logger.logging.Handler):
class NukeHandler(api.Logger.logging.Handler):
'''
Nuke Handler - emits logs into nuke's script editor.
warning will emit nuke.warning()
@@ -45,7 +45,7 @@ class NukeHandler(pype.Logger.logging.Handler):
'''
def __init__(self):
pype.Logger.logging.Handler.__init__(self)
api.Logger.logging.Handler.__init__(self)
self.set_name("Pype_Nuke_Handler")
def emit(self, record):
@@ -65,11 +65,11 @@ class NukeHandler(pype.Logger.logging.Handler):
nuke_handler = NukeHandler()
if nuke_handler.get_name() \
not in [handler.get_name()
for handler in pype.Logger.logging.root.handlers[:]]:
pype.Logger.logging.getLogger().addHandler(nuke_handler)
for handler in api.Logger.logging.root.handlers[:]]:
api.Logger.logging.getLogger().addHandler(nuke_handler)
if not self.nLogger:
self.nLogger = pype.Logger
self.nLogger = api.Logger
def reload_config():
@@ -86,8 +86,6 @@ def reload_config():
"app.api",
"{}.api".format(AVALON_CONFIG),
"{}.templates".format(AVALON_CONFIG),
"{}.nuke".format(AVALON_CONFIG),
"{}.nuke.lib".format(AVALON_CONFIG),
"{}.nuke.templates".format(AVALON_CONFIG),
"{}.nuke.menu".format(AVALON_CONFIG)
):
@@ -100,7 +98,8 @@ def reload_config():
def install():
pype.fill_avalon_workdir()
api.fill_avalon_workdir()
reload_config()
log.info("Registering Nuke plug-ins..")
@@ -113,8 +112,7 @@ def install():
# Disable all families except for the ones we explicitly want to see
family_states = [
"render",
"still"
"write",
"lifeGroup",
"backdrop",
"imagesequence",
@@ -129,7 +127,7 @@ def install():
menu.install()
# load data from templates
pype.load_data_from_templates()
api.load_data_from_templates()
def uninstall():
@@ -141,7 +139,7 @@ def uninstall():
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
# reset data from templates
pype.reset_data_from_templates()
api.reset_data_from_templates()
def on_pyblish_instance_toggled(instance, old_value, new_value):
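For context on the handler juggling above: the module strips avalon-core's root log handlers and installs a single Nuke-aware one. A minimal sketch of such a handler, assuming nuke.error/nuke.warning/nuke.tprint as the script-editor outputs (the actual emit body is not shown in this hunk):

import logging

import nuke


class ScriptEditorHandler(logging.Handler):
    """Route Python log records into Nuke's script editor (sketch)."""

    def __init__(self):
        logging.Handler.__init__(self)
        self.set_name("Pype_Nuke_Handler")

    def emit(self, record):
        msg = self.format(record)
        if record.levelno >= logging.ERROR:
            nuke.error(msg)        # red text in the script editor
        elif record.levelno >= logging.WARNING:
            nuke.warning(msg)      # matches "warning will emit nuke.warning()"
        else:
            nuke.tprint(msg)       # plain print into the script editor

# Register once, mirroring the duplicate-name guard in the hunk above.
if "Pype_Nuke_Handler" not in [h.get_name()
                               for h in logging.root.handlers]:
    logging.getLogger().addHandler(ScriptEditorHandler())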

View file

@@ -2,6 +2,7 @@ import sys
from collections import OrderedDict
from pprint import pprint
from avalon.vendor.Qt import QtGui
from avalon import api, io
import avalon.nuke
import pype.api as pype
import nuke
@@ -99,57 +100,6 @@ def add_rendering_knobs(node):
return node
def update_frame_range(start, end, root=None):
"""Set Nuke script start and end frame range
Args:
start (float, int): start frame
end (float, int): end frame
root (object, Optional): root object from nuke's script
Returns:
None
"""
knobs = {
"first_frame": start,
"last_frame": end
}
with avalon.nuke.viewer_update_and_undo_stop():
for key, value in knobs.items():
if root:
root[key].setValue(value)
else:
nuke.root()[key].setValue(value)
def get_additional_data(container):
"""Get Nuke's related data for the container
Args:
container(dict): the container found by the ls() function
Returns:
dict
"""
node = container["_tool"]
tile_color = node['tile_color'].value()
if tile_color is None:
return {}
hex = '%08x' % tile_color
rgba = [
float(int(hex[0:2], 16)) / 255.0,
float(int(hex[2:4], 16)) / 255.0,
float(int(hex[4:6], 16)) / 255.0
]
return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])}
def set_viewers_colorspace(viewer):
assert isinstance(viewer, dict), log.error(
"set_viewers_colorspace(): argument should be dictionary")
@@ -241,10 +191,115 @@ def get_avalon_knob_data(node):
import toml
try:
data = toml.loads(node['avalon'].value())
except:
except Exception:
return None
return data
def reset_resolution():
"""Set resolution to project resolution."""
log.info("Reseting resolution")
project = io.find_one({"type": "project"})
asset = api.Session["AVALON_ASSET"]
asset = io.find_one({"name": asset, "type": "asset"})
try:
width = asset["data"].get("resolution_width", 1920)
height = asset["data"].get("resolution_height", 1080)
pixel_aspect = asset["data"].get("pixel_aspect", 1)
bbox = asset["data"].get("crop", "0.0.1920.1080")
try:
x, y, r, t = bbox.split(".")
except Exception as e:
x = 0
y = 0
r = width
t = height
log.error("{}: {} \nFormat:Crop need to be set with dots, example: "
"0.0.1920.1080, /nSetting to default".format(__name__, e))
except KeyError:
log.warning(
"No resolution information found for \"{0}\".".format(
project["name"]
)
)
return
used_formats = list()
for f in nuke.formats():
if project["name"] in str(f.name()):
used_formats.append(f)
else:
format_name = project["name"] + "_1"
crnt_fmt_str = ""
if used_formats:
check_format = used_formats[-1]
format_name = "{}_{}".format(
project["name"],
int(used_formats[-1].name()[-1])+1
)
log.info(
"Format exists: {}. "
"Will create new: {}...".format(
used_formats[-1].name(),
format_name)
)
crnt_fmt_kargs = {
"width": (check_format.width()),
"height": (check_format.height()),
"x": int(check_format.x()),
"y": int(check_format.y()),
"r": int(check_format.r()),
"t": int(check_format.t()),
"pixel_aspect": float(check_format.pixelAspect())
}
crnt_fmt_str = make_format_string(**crnt_fmt_kargs)
log.info("crnt_fmt_str: {}".format(crnt_fmt_str))
new_fmt_kargs = {
"width": int(width),
"height": int(height),
"x": int(x),
"y": int(y),
"r": int(r),
"t": int(t),
"pixel_aspect": float(pixel_aspect),
"project_name": format_name
}
new_fmt_str = make_format_string(**new_fmt_kargs)
log.info("new_fmt_str: {}".format(new_fmt_str))
if new_fmt_str not in crnt_fmt_str:
make_format(frm_str=new_fmt_str,
project_name=new_fmt_kargs["project_name"])
log.info("Format is set")
def make_format_string(**args):
format_str = (
"{width} "
"{height} "
"{x} "
"{y} "
"{r} "
"{t} "
"{pixel_aspect:.2f}".format(**args)
)
return format_str
def make_format(**args):
log.info("Format does't exist, will create: \n{}".format(args))
nuke.addFormat("{frm_str} "
"{project_name}".format(**args))
nuke.root()["format"].setValue("{project_name}".format(**args))
# TODO: the functions below are WIP and need to be checked for where they are used
# ------------------------------------
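To make the format plumbing above concrete: make_format_string renders the space-separated definition nuke.addFormat expects (width height x y r t pixel_aspect), and make_format appends the format name and registers it. A minimal usage sketch, with the project name hypothetical:

fmt = make_format_string(width=1920, height=1080, x=0, y=0,
                         r=1920, t=1080, pixel_aspect=1.0)
# fmt == "1920 1080 0 0 1920 1080 1.00"
make_format(frm_str=fmt, project_name="myproject_1")
# adds "1920 1080 0 0 1920 1080 1.00 myproject_1" to Nuke's format list
# and points nuke.root()["format"] at "myproject_1"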

View file

@@ -5,8 +5,17 @@ from pype.nuke import lib
def install():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(Session["AVALON_LABEL"])
menu.addSeparator()
menu.addCommand("Set colorspace...", lib.set_colorspace)
# replace reset resolution from avalon core to pype's
name = "Reset Resolution"
rm_item = [(i, item)
for i, item in enumerate(menu.items())
if name in item.name()][0]
menu.removeItem(rm_item[1].name())
menu.addCommand(rm_item[1].name(), lib.reset_resolution, index=rm_item[0])
# add colorspace menu item
menu.addCommand("Set colorspace...", lib.set_colorspace, index=rm_item[0]+1)

View file

@ -1,5 +1,6 @@
import pyblish.api
import os
import clique
class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
@@ -20,7 +21,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'rig': 'rig',
'setdress': 'setdress',
'pointcache': 'cache',
'review': 'mov'}
'review': 'mov',
'write': 'img',
'render': 'render'}
def process(self, instance):
@@ -36,20 +39,25 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
componentList = []
transfers = instance.data["transfers"]
dst_list = instance.data['destination_list']
ft_session = instance.context.data["ftrackSession"]
location = ft_session.query(
'Location where name is "ftrack.unmanaged"').one()
self.log.debug('location {}'.format(location))
for src, dest in transfers:
filename, ext = os.path.splitext(src)
self.log.debug('source filename: ' + filename)
self.log.debug('source ext: ' + ext)
for file in instance.data['destination_list']:
self.log.debug('file {}'.format(file))
for file in dst_list:
filename, ext = os.path.splitext(file)
self.log.debug('dest ext: ' + ext)
componentList.append({"assettype_data": {
"short": asset_type,
},
"asset_data": {
"name": instance.data["subset"],
},
"assetversion_data": {
"version": version_number,
@@ -57,7 +65,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"component_data": {
"name": ext[1:], # Default component name is "main".
},
"component_path": dest,
"component_path": file,
'component_location': location,
"component_overwrite": False,
}
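Each destination file above becomes one ftrack component, named after its file extension rather than ftrack's default "main". A reduced sketch of that mapping (paths hypothetical):

import os

dst_list = ["/publish/shot010_beauty_v001.mov",
            "/publish/shot010_beauty_v001.%04d.exr"]  # assumed destinations
for file in dst_list:
    filename, ext = os.path.splitext(file)
    component_name = ext[1:]   # -> "mov", "exr"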

View file

@@ -18,7 +18,7 @@ def open(filepath):
class PlayImageSequence(api.Loader):
"""Open Image Sequence with system default"""
families = ["imagesequence"]
families = ["write"]
representations = ["*"]
label = "Play sequence"

View file

@@ -82,8 +82,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.log.debug("Establishing staging directory @ %s" % stagingdir)
project = io.find_one({"type": "project"},
projection={"config.template.publish": True})
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
"name": ASSET,
@@ -136,12 +135,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# \|________|
#
root = api.registered_root()
# template_data = {"root": root,
# "project": PROJECT,
# "silo": asset['silo'],
# "asset": ASSET,
# "subset": subset["name"],
# "version": version["name"]}
hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
@@ -149,7 +142,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_data = {"root": root,
"project": {"name": PROJECT,
"code": "prjX"},
"code": project['data']['code']},
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
@@ -163,6 +156,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
for files in instance.data["files"]:
@@ -195,6 +189,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
dst = anatomy_filled.publish.path
instance.data["transfers"].append([src, dst])
template = anatomy.publish.path
else:
# Single file
@@ -218,13 +213,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
dst = anatomy_filled.publish.path
instance.data["transfers"].append([src, dst])
template = anatomy.publish.path
representation = {
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {'path': dst},
"data": {'path': dst, 'template': template},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
@@ -232,7 +228,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"context": {
"root": root,
"project": PROJECT,
"projectcode": "prjX",
"projectcode": project['data']['code'],
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
@@ -243,6 +239,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"representation": ext[1:]
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))

View file

@@ -0,0 +1,252 @@
import os
import contextlib
from avalon import api
import avalon.io as io
from avalon.nuke import log
import nuke
@contextlib.contextmanager
def preserve_inputs(node, knobs):
"""Preserve the node's inputs after context"""
values = {}
for name in knobs:
try:
knob_value = node[name].value()
values[name] = knob_value
except ValueError:
log.warning("missing knob {} in node {}"
"{}".format(name, node['name'].value()))
try:
yield
finally:
for name, value in values.items():
node[name].setValue(value)
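# Usage sketch for preserve_inputs (knob names as used further below):
# knob edits made inside the block are rolled back on exit, so a path
# change can be made "in isolation".
#
# with preserve_inputs(read_node, knobs=["first", "last"]):
#     read_node["first"].setValue(1001)   # temporary, restored on exit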
@contextlib.contextmanager
def preserve_trim(node):
"""Preserve the relative trim of the Loader tool.
This tries to preserve the loader's trim (trim in and trim out) after
the context by reapplying the "amount" it trims on the clip's length at
start and end.
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
start_at_frame = None
offset_frame = None
if node['frame_mode'].value() == "start at":
start_at_frame = node['frame'].value()
if node['frame_mode'].value() == "offset":
offset_frame = node['frame'].value()
try:
yield
finally:
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start))
log.info("start frame of reader was set to"
"{}".format(script_start))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str((script_start + offset_frame)))
log.info("start frame of reader was set to"
"{}".format(script_start))
def loader_shift(node, frame, relative=True):
"""Shift global in time by i preserving duration
This moves the loader by i frames preserving global duration. When relative
is False it will shift the global in to the start frame.
Args:
loader (tool): The fusion loader tool.
frame (int): The amount of frames to move.
relative (bool): When True the shift is relative, else the shift will
change the global in to frame.
Returns:
int: The resulting relative frame change (how much it moved)
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
if node['frame_mode'].value() == "start at":
start_at_frame = node['frame'].value()
if node['frame_mode'].value() == "offset":
offset_frame = node['frame'].value()
if relative:
shift = frame
else:
if start_at_frame:
shift = frame
if offset_frame:
shift = frame + offset_frame
# Shifting global in will try to automatically compensate for the change
# in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
# input values to "just shift" the clip
with preserve_inputs(node, knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
# GlobalIn cannot be set past GlobalOut or vice versa
# so we must apply them in the order of the shift.
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start + shift))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str(shift))
return int(shift)
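# Worked example for loader_shift (frame numbers hypothetical): with the
# script start at 1001 and a Read node already in "start at" mode,
#
#     loader_shift(read_node, 10, relative=True)
#
# re-pins the clip to start at 1011 (script start + shift) and returns 10.
# preserve_inputs above restores every other knob, so only the clip's
# placement in time changes; its duration is untouched.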
class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["write"]
representations = ["*"]
label = "Load sequence"
order = -10
icon = "code-fork"
color = "orange"
def load(self, context, name, namespace, data):
from avalon.nuke import (
containerise,
ls_img_sequence,
viewer_update_and_undo_stop
)
log.info("here i am")
# Fallback to asset name when namespace is None
if namespace is None:
namespace = context['asset']['name']
# Use the first file for now
# TODO: fix path fname
file = ls_img_sequence(os.path.dirname(self.fname), one=True)
# Create the Loader with the filename path set
with viewer_update_and_undo_stop():
# TODO: it might be universal read to img/geo/camera
r = nuke.createNode(
"Read",
"name {}".format(self.name)) # TODO: does self.name exist?
r["file"].setValue(file['path'])
if len(file['frames']) == 1:
first = file['frames'][0][0]
last = file['frames'][0][1]
r["originfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["last"].setValue(last)
else:
first = file['frames'][0][0]
last = file['frames'][-1][1]
r["originfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["last"].setValue(last)
log.warning("Missing frames in image sequence")
# Set global in point to start frame (if in version.data)
start = context["version"]["data"].get("startFrame", None)
if start is not None:
loader_shift(r, start, relative=False)
containerise(r,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__)
def switch(self, container, representation):
self.update(container, representation)
def update(self, container, representation):
"""Update the Loader's path
Fusion automatically tries to reset some variables when changing
the loader's path to a new file. These automatic changes are to its
inputs:
"""
from avalon.nuke import (
viewer_update_and_undo_stop,
ls_img_sequence,
update_container
)
log.info("this i can see")
node = container["_tool"]
# TODO: prepare also for other readers img/geo/camera
assert node.Class() == "Reader", "Must be Reader"
root = api.get_representation_path(representation)
file = ls_img_sequence(os.path.dirname(root), one=True)
# Get start frame from version data
version = io.find_one({"type": "version",
"_id": representation["parent"]})
start = version["data"].get("startFrame")
if start is None:
log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(node['name'].value(), representation))
start = 0
with viewer_update_and_undo_stop():
# Update the loader's path whilst preserving some values
with preserve_trim(node):
with preserve_inputs(node,
knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
node["file"] = file["path"]
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)
if global_in_changed:
# Log this change to the user
log.debug("Changed '{}' global in:"
" {:d}".format(node['name'].value(), start))
# Update the imprinted representation
update_container(
node,
{"representation": str(representation["_id"])}
)
def remove(self, container):
from avalon.nuke import viewer_update_and_undo_stop
node = container["_tool"]
assert node.Class() == "Reader", "Must be Reader"
with viewer_update_and_undo_stop():
nuke.delete(node)

View file

@@ -3,6 +3,9 @@
"""
from avalon import api
from pype.api import Logger
log = Logger.getLogger(__name__, "nuke")
class SetFrameRangeLoader(api.Loader):
@@ -10,7 +13,7 @@ class SetFrameRangeLoader(api.Loader):
families = ["animation",
"camera",
"imagesequence",
"write",
"yeticache",
"pointcache"]
representations = ["*"]
@@ -30,9 +33,10 @@ class SetFrameRangeLoader(api.Loader):
start = version_data.get("startFrame", None)
end = version_data.get("endFrame", None)
log.info("start: {}, end: {}".format(start, end))
if start is None or end is None:
print("Skipping setting frame range because start or "
"end frame data is missing..")
log.info("Skipping setting frame range because start or "
"end frame data is missing..")
return
lib.update_frame_range(start, end)
@@ -43,7 +47,7 @@ class SetFrameRangeWithHandlesLoader(api.Loader):
families = ["animation",
"camera",
"imagesequence",
"write",
"yeticache",
"pointcache"]
representations = ["*"]

View file

@@ -4,28 +4,11 @@ import contextlib
from avalon import api
import avalon.io as io
from avalon.nuke import log
import nuke
@contextlib.contextmanager
def preserve_inputs(node, knobs):
"""Preserve the node's inputs after context"""
values = {}
for name in knobs:
try:
knob_value = node[name].vaule()
values[name] = knob_value
except ValueError:
log.warning("missing knob {} in node {}"
"{}".format(name, node['name'].value()))
try:
yield
finally:
for name, value in values.items():
node[name].setValue(value)
from pype.api import Logger
log = Logger.getLogger(__name__, "nuke")
@contextlib.contextmanager
@@ -38,7 +21,7 @@ def preserve_trim(node):
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
script_start = nuke.root()["first_frame"].value()
start_at_frame = None
offset_frame = None
@@ -80,48 +63,19 @@ def loader_shift(node, frame, relative=True):
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
if node['frame_mode'].value() == "start at":
start_at_frame = node['frame'].value()
if node['frame_mode'].value() is "offset":
offset_frame = node['frame'].value()
script_start = nuke.root()["first_frame"].value()
if relative:
shift = frame
else:
if start_at_frame:
shift = frame
if offset_frame:
shift = frame + offset_frame
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start))
# Shifting global in will try to automatically compensate for the change
# in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
# input values to "just shift" the clip
with preserve_inputs(node, knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
# GlobalIn cannot be set past GlobalOut or vice versa
# so we must apply them in the order of the shift.
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start + shift))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str(shift))
return int(shift)
return int(script_start)
class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["imagesequence"]
families = ["write"]
representations = ["*"]
label = "Load sequence"
@@ -130,12 +84,13 @@ class LoadSequence(api.Loader):
color = "orange"
def load(self, context, name, namespace, data):
from avalon.nuke import (
containerise,
ls_img_sequence,
viewer_update_and_undo_stop
)
for k, v in context.items():
log.info("key: `{}`, value: {}\n".format(k, v))
# Fallback to asset name when namespace is None
if namespace is None:
@@ -144,40 +99,56 @@ class LoadSequence(api.Loader):
# Use the first file for now
# TODO: fix path fname
file = ls_img_sequence(os.path.dirname(self.fname), one=True)
log.info("file: {}\n".format(file))
read_name = "Read_" + context["representation"]["context"]["subset"]
# Create the Loader with the filename path set
with viewer_update_and_undo_stop():
# TODO: it might be universal read to img/geo/camera
r = nuke.createNode(
"Read",
"name {}".format(self.name)) # TODO: does self.name exist?
"name {}".format(read_name))
r["file"].setValue(file['path'])
if len(file['frames']) == 1:
first = file['frames'][0][0]
last = file['frames'][0][1]
r["originfirst"].setValue(first)
r["origfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["origlast"].setValue(last)
r["last"].setValue(last)
else:
first = file['frames'][0][0]
last = file['frames'][-1][1]
r["originfirst"].setValue(first)
r["origfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["origlast"].setValue(last)
r["last"].setValue(last)
log.warning("Missing frames in image sequence")
# Set colorspace defined in version data
colorspace = context["version"]["data"].get("colorspace", None)
if colorspace is not None:
r["colorspace"].setValue(str(colorspace))
# Set global in point to start frame (if in version.data)
start = context["version"]["data"].get("startFrame", None)
if start is not None:
loader_shift(r, start, relative=False)
loader_shift(r, start, relative=True)
# add additional metadata from the version to imprint to Avalon knob
add_keys = ["startFrame", "endFrame", "handles",
"source", "colorspace", "author", "fps"]
data_imprint = {}
for k in add_keys:
data_imprint.update({k: context["version"]['data'][k]})
containerise(r,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__)
loader=self.__class__.__name__,
data=data_imprint)
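# A defensive variant of the imprint loop above (sketch): dict.get avoids
# a KeyError when a version is missing one of the optional keys.
#
# data_imprint = {k: context["version"]["data"].get(k) for k in add_keys}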
def switch(self, container, representation):
self.update(container, representation)
@@ -196,7 +167,7 @@ class LoadSequence(api.Loader):
ls_img_sequence,
update_container
)
log.info("this i can see")
node = container["_tool"]
# TODO: prepare also for other readers img/geo/camera
assert node.Class() == "Reader", "Must be Reader"
@@ -218,15 +189,7 @@ class LoadSequence(api.Loader):
# Update the loader's path whilst preserving some values
with preserve_trim(node):
with preserve_inputs(node,
knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
node["file"] = file["path"]
node["file"] = file["path"]
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)

View file

@@ -28,20 +28,27 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
avalon_knob_data = get_avalon_knob_data(node)
if not avalon_knob_data:
continue
subset = avalon_knob_data["subset"]
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
subset = avalon_knob_data.get("subset", None) or node["name"].value()
# Create instance
instance = context.create_instance(subset)
instance.add(node)
instance.data.update({
"subset": subset,
"asset": os.environ["AVALON_ASSET"],
"label": node.name(),
"name": node.name(),
"subset": subset,
"families": [avalon_knob_data["families"]],
"family": avalon_knob_data["family"],
"publish": node.knob("publish").value()
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish')
})
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)

View file

@@ -3,6 +3,8 @@ import os
import nuke
import pyblish.api
import logging
from avalon import io, api
log = logging.getLogger(__name__)
@@ -15,6 +17,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
hosts = ["nuke", "nukeassist"]
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.debug("asset_data: {}".format(asset_data["data"]))
for instance in context.data["instances"]:
self.log.debug("checking instance: {}".format(instance))
node = instance[0]
@@ -63,9 +68,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
else:
# dealing with local/farm rendering
if node["render_farm"].value():
families = "{}.farm".format(instance.data["families"][0])
families = "{}.farm".format(instance.data["avalonKnob"]["families"][0])
else:
families = "{}.local".format(instance.data["families"][0])
families = "{}.local".format(instance.data["avalonKnob"]["families"][0])
self.log.debug("checking for error: {}".format(label))
instance.data.update({
@@ -73,12 +78,15 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
"outputDir": output_dir,
"ext": ext,
"label": label,
"families": [families],
"firstFrame": first_frame,
"lastFrame": last_frame,
"families": [families, 'ftrack'],
"startFrame": first_frame,
"endFrame": last_frame,
"outputType": output_type,
"stagingDir": output_dir,
"colorspace": node["colorspace"].value(),
"handles": int(asset_data["data"].get("handles", 0)),
"step": 1,
"fps": int(nuke.root()['fps'].value())
})
self.log.debug("instance.data: {}".format(instance.data))

View file

@@ -1,6 +1,7 @@
import os
import logging
import shutil
import clique
import errno
import pyblish.api
@@ -30,7 +31,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
self.register(instance)
self.log.info("Integrating Asset in to the database ...")
# self.integrate(instance)
if instance.data.get('transfer', True):
self.integrate(instance)
def register(self, instance):
@@ -70,8 +72,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
self.log.debug("Establishing staging directory @ %s" % stagingdir)
project = io.find_one({"type": "project"},
projection={"config.template.publish": True})
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
"name": ASSET,
@@ -110,9 +111,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
locations=[LOCATION],
data=version_data)
self.log.debug("version: {}".format(version))
self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id
self.log.debug("version_id: {}".format(version_id))
# Write to disk
# _
# | |
@@ -124,36 +127,32 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
# \|________|
#
root = api.registered_root()
# template_data = {"root": root,
# "project": PROJECT,
# "silo": asset['silo'],
# "asset": ASSET,
# "subset": subset["name"],
# "version": version["name"]}
hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents']
hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
self.log.debug("hierarchy: {}".format(hierarchy))
template_data = {"root": root,
"project": {"name": PROJECT,
"code": "prjX"},
"code": project['data']['code']},
"silo": asset['silo'],
"task": api.Session["AVALON_TASK"],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"VERSION": version["name"],
"hierarchy": hierarchy}
template_publish = project["config"]["template"]["publish"]
# template_publish = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
for files in instance.data["files"]:
# Collection
# _______
# |______|\
@@ -164,26 +163,30 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
# |_______|
#
if isinstance(files, list):
collection = files
# Assert that each member has identical suffix
_, ext = os.path.splitext(collection[0])
assert all(ext == os.path.splitext(name)[1]
for name in collection), (
"Files had varying suffixes, this is a bug"
)
assert not any(os.path.isabs(name) for name in collection)
template_data["representation"] = ext[1:]
dst_collection = []
for fname in collection:
filename, ext = os.path.splitext(fname)
_, frame = os.path.splitext(filename)
template_data["representation"] = ext[1:]
template_data["frame"] = frame[1:]
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.publish.path
dst = anatomy_filled.render.path
# if instance.data.get('transfer', True):
# instance.data["transfers"].append([src, dst])
dst_collection.append(dst)
instance.data["transfers"].append([src, dst])
template = anatomy.render.path
collections, remainder = clique.assemble(dst_collection)
dst = collections[0].format('{head}{padding}{tail}')
else:
# Single file
@@ -204,19 +207,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.publish.path
dst = anatomy_filled.render.path
template = anatomy.render.path
instance.data["transfers"].append([src, dst])
# if instance.data.get('transfer', True):
# dst = src
# instance.data["transfers"].append([src, dst])
representation = {
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {'path': src},
"data": {'path': dst, 'template': template},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
@@ -224,7 +225,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
"context": {
"root": root,
"project": PROJECT,
"projectcode": "prjX",
"projectcode": project['data']['code'],
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
@@ -235,6 +236,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
"representation": ext[1:]
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))
@@ -353,9 +356,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
"comment": context.data.get("comment")}
# Include optional data if present in
optionals = ["startFrame", "endFrame", "step", "handles"]
optionals = ["startFrame", "endFrame", "step",
"handles", "colorspace", "fps", "outputDir"]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data[key]
version_data[key] = instance.data.get(key, None)
return version_data
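For reference, the clique.assemble call in this file collapses the per-frame destination paths back into one padded sequence path for the representation. A minimal sketch with hypothetical file names:

import clique

paths = ["/publish/beauty.0001.exr",
         "/publish/beauty.0002.exr",
         "/publish/beauty.0003.exr"]
collections, remainder = clique.assemble(paths)
dst = collections[0].format('{head}{padding}{tail}')
# dst == "/publish/beauty.%04d.exr"; remainder holds non-sequence files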

View file

@@ -29,8 +29,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
self.log.debug("instance collected: {}".format(instance.data))
first_frame = instance.data.get("firstFrame", None)
last_frame = instance.data.get("lastFrame", None)
first_frame = instance.data.get("startFrame", None)
last_frame = instance.data.get("endFrame", None)
node_subset_name = instance.data.get("name", None)
self.log.info("Starting render")

View file

@@ -35,8 +35,8 @@ class ValidateCollection(pyblish.api.InstancePlugin):
collections, remainder = clique.assemble(*instance.data['files'])
self.log.info('collections: {}'.format(str(collections)))
frame_length = instance.data["lastFrame"] \
- instance.data["firstFrame"] + 1
frame_length = instance.data["endFrame"] \
- instance.data["startFrame"] + 1
if frame_length != 1:
assert len(collections) == 1, self.log.info(
@@ -45,6 +45,14 @@ class ValidateCollection(pyblish.api.InstancePlugin):
assert remainder is not None, self.log.info("There are some extra files in folder")
basename, ext = os.path.splitext(list(collections[0])[0])
assert all(ext == os.path.splitext(name)[1]
for name in collections[0]), self.log.info(
"Files had varying suffixes"
)
assert not any(os.path.isabs(name) for name in collections[0]), self.log.info("some file names are absolute")
self.log.info('frame_length: {}'.format(frame_length))
self.log.info('len(list(instance.data["files"])): {}'.format(
len(list(instance.data["files"][0]))))
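A hedged sketch of the completeness check this validator is aiming at, using clique's hole detection on the assembled collection (file names hypothetical):

import clique

collections, remainder = clique.assemble(
    ["render.0001.exr", "render.0002.exr", "render.0004.exr"])
missing = list(collections[0].holes())
# missing == ["render.0003.exr"] -> the sequence has a gap
assert not missing, "Missing frames: {}".format(missing)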