Merge branch 'release/2.5' into develop

This commit is contained in:
Milan Kolar 2020-02-10 22:49:17 +01:00
commit c71b8dbf2c
26 changed files with 186 additions and 644 deletions

View file

@ -9,7 +9,7 @@ from pypeapp import config
import logging
log = logging.getLogger(__name__)
__version__ = "2.3.0"
__version__ = "2.5.0"
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

View file

@ -33,42 +33,6 @@ if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
# class NukeHandler(logging.Handler):
# '''
# Nuke Handler - emits logs into nuke's script editor.
# warning will emit nuke.warning()
# critical and fatal would popup msg dialog to alert of the error.
# '''
#
# def __init__(self):
# logging.Handler.__init__(self)
# self.set_name("Pype_Nuke_Handler")
#
# def emit(self, record):
# # Formated message:
# msg = self.format(record)
#
# if record.levelname.lower() in [
# # "warning",
# "critical",
# "fatal",
# "error"
# ]:
# msg = self.format(record)
# nuke.message(msg)
#
#
# '''Adding Nuke Logging Handler'''
# log.info([handler.get_name() for handler in logging.root.handlers[:]])
# nuke_handler = NukeHandler()
# if nuke_handler.get_name() \
# not in [handler.get_name()
# for handler in logging.root.handlers[:]]:
# logging.getLogger().addHandler(nuke_handler)
# logging.getLogger().setLevel(logging.INFO)
# log.info([handler.get_name() for handler in logging.root.handlers[:]])
def reload_config():
"""Attempt to reload pipeline at run-time.

View file

@ -374,7 +374,7 @@ def create_write_node(name, data, input=None, prenodes=None):
now_node.setInput(0, prev_node)
# imprinting group node
avalon.nuke.imprint(GN, data["avalon"], tab="Pype")
avalon.nuke.imprint(GN, data["avalon"])
divider = nuke.Text_Knob('')
GN.addKnob(divider)
@ -645,15 +645,105 @@ class WorkfileSettings(object):
write_dict (dict): nuke write node as dictionary
'''
# TODO: complete this function so any write node in
# scene will have fixed colorspace following presets for the project
if not isinstance(write_dict, dict):
msg = "set_root_colorspace(): argument should be dictionary"
nuke.message(msg)
log.error(msg)
return
log.debug("__ set_writes_colorspace(): {}".format(write_dict))
from avalon.nuke import get_avalon_knob_data
for node in nuke.allNodes():
if node.Class() in ["Viewer", "Dot"]:
continue
# get data from avalon knob
avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"])
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
# establish families
families = [avalon_knob_data["family"]]
if avalon_knob_data.get("families"):
families.append(avalon_knob_data.get("families"))
# except disabled nodes but exclude backdrops in test
for fmly, knob in write_dict.items():
write = None
if (fmly in families):
# Add all nodes in group instances.
if node.Class() == "Group":
node.begin()
for x in nuke.allNodes():
if x.Class() == "Write":
write = x
node.end()
elif node.Class() == "Write":
write = node
else:
log.warning("Wrong write node Class")
write["colorspace"].setValue(str(knob["colorspace"]))
log.info(
"Setting `{0}` to `{1}`".format(
write.name(),
knob["colorspace"]))
def set_reads_colorspace(self, reads):
""" Setting colorspace to Read nodes
Looping trought all read nodes and tries to set colorspace based on regex rules in presets
"""
changes = dict()
for n in nuke.allNodes():
file = nuke.filename(n)
if not n.Class() == "Read":
continue
# load nuke presets for Read's colorspace
read_clrs_presets = get_colorspace_preset().get(
"nuke", {}).get("read", {})
# check if any colorspace presets for read is mathing
preset_clrsp = next((read_clrs_presets[k]
for k in read_clrs_presets
if bool(re.search(k, file))),
None)
log.debug(preset_clrsp)
if preset_clrsp is not None:
current = n["colorspace"].value()
future = str(preset_clrsp)
if current != future:
changes.update({
n.name(): {
"from": current,
"to": future
}
})
log.debug(changes)
if changes:
msg = "Read nodes are not set to correct colospace:\n\n"
for nname, knobs in changes.items():
msg += str(" - node: '{0}' is now '{1}' "
"but should be '{2}'\n").format(
nname, knobs["from"], knobs["to"]
)
msg += "\nWould you like to change it?"
if nuke.ask(msg):
for nname, knobs in changes.items():
n = nuke.toNode(nname)
n["colorspace"].setValue(knobs["to"])
log.info(
"Setting `{0}` to `{1}`".format(
nname,
knobs["to"]))
def set_colorspace(self):
''' Setting colorpace following presets
@ -671,6 +761,7 @@ class WorkfileSettings(object):
msg = "set_colorspace(): missing `viewer` settings in template"
nuke.message(msg)
log.error(msg)
try:
self.set_writes_colorspace(nuke_colorspace["write"])
except AttributeError:
@ -678,6 +769,10 @@ class WorkfileSettings(object):
nuke.message(msg)
log.error(msg)
reads = nuke_colorspace.get("read")
if reads:
self.set_reads_colorspace(reads)
try:
for key in nuke_colorspace:
log.debug("Preset's colorspace key: {}".format(key))

View file

@ -1,24 +0,0 @@
import pyblish.api
class WriteToRender(pyblish.api.InstancePlugin):
    """Switch the Render knob on a write instance to on,
    so the next publish will be set to render.
    """

    order = pyblish.api.ExtractorOrder + 0.1
    label = "Write to render next"
    optional = True
    hosts = ["nuke", "nukeassist"]
    families = ["write"]

    def process(self, instance):
        # NOTE(review): this unconditional return disables the plug-in;
        # everything below is dead code — confirm whether that is intended.
        return
        # Instances carrying a ".frames" family should render next time.
        if [f for f in instance.data["families"]
                if ".frames" in f]:
            instance[0]["render"].setValue(True)
            self.log.info("Swith write node render to `on`")
        else:
            # swith to
            instance[0]["render"].setValue(False)
            self.log.info("Swith write node render to `Off`")

View file

@ -1,14 +0,0 @@
import pyblish.api
import nuke
class CollectActiveViewer(pyblish.api.ContextPlugin):
    """Store the currently active Nuke viewer on the publish context."""

    order = pyblish.api.CollectorOrder + 0.3
    label = "Collect Active Viewer"
    hosts = ["nuke"]

    def process(self, context):
        # Grab whichever viewer Nuke currently considers active and
        # expose it to later plug-ins under the "ActiveViewer" key.
        active_viewer = nuke.activeViewer()
        context.data["ActiveViewer"] = active_viewer

View file

@ -1,22 +0,0 @@
import pyblish
class ExtractFramesToIntegrate(pyblish.api.InstancePlugin):
    """Extract rendered frames for integrator."""

    order = pyblish.api.ExtractorOrder
    label = "Extract rendered frames"
    hosts = ["nuke"]
    families = ["render"]

    def process(self, instance):
        # BUG FIX: the signature line was truncated in the original
        # ("def process(self, instance\") and did not parse; restored.
        # NOTE(review): the plug-in is currently disabled by this early
        # return; the staging-dir fallback below is kept for reference.
        return
        # staging_dir = instance.data.get('stagingDir', None)
        # output_dir = instance.data.get('outputDir', None)
        #
        # if not staging_dir:
        #     staging_dir = output_dir
        #     instance.data['stagingDir'] = staging_dir
        # # instance.data['transfer'] = False

View file

@ -1,116 +0,0 @@
import os
import nuke
import pyblish.api
class Extract(pyblish.api.InstancePlugin):
    """Super class for write and writegeo extractors."""

    order = pyblish.api.ExtractorOrder
    optional = True
    label = "Extract Nuke [super]"
    hosts = ["nuke"]
    match = pyblish.api.Subset
    # targets = ["process.local"]

    def execute(self, instance):
        # Render the instance's node over the appropriate frame range.
        # Get frame range from the script's globals first.
        node = instance[0]
        first_frame = nuke.root()["first_frame"].value()
        last_frame = nuke.root()["last_frame"].value()

        # A node-level frame limit overrides the script's global range.
        if node["use_limit"].value():
            first_frame = node["first"].value()
            last_frame = node["last"].value()

        # Render frames
        nuke.execute(node.name(), int(first_frame), int(last_frame))
class ExtractNukeWrite(Extract):
    """ Extract output from write nodes. """

    families = ["write", "local"]
    label = "Extract Write"

    def process(self, instance):
        self.execute(instance)

        # Validate output: drop every expected file that did not render,
        # warning about each one. Iterate a copy since we mutate the list.
        for expected in list(instance.data["collection"]):
            if os.path.exists(expected):
                continue
            instance.data["collection"].remove(expected)
            self.log.warning("\"{0}\" didn't render.".format(expected))
class ExtractNukeCache(Extract):
    """Render a cache via the parent extractor and verify the output file."""

    label = "Cache"
    families = ["cache", "local"]

    def process(self, instance):
        self.execute(instance)

        # Validate output
        output_path = instance.data["output_path"]
        msg = "\"{0}\" didn't render.".format(output_path)
        assert os.path.exists(output_path), msg
class ExtractNukeCamera(Extract):
    """Extract camera data via the write node.

    Geometry, point-cloud and axis export are temporarily switched off so
    the render contains only the camera, then switched back on.
    """

    label = "Camera"
    families = ["camera", "local"]

    def process(self, instance):
        node = instance[0]

        # Restrict export to camera data only for this render.
        node["writeGeometries"].setValue(False)
        node["writePointClouds"].setValue(False)
        node["writeAxes"].setValue(False)

        # Redirect the node's output to the instance's path, render,
        # then restore the original path below.
        file_path = node["file"].getValue()
        node["file"].setValue(instance.data["output_path"])

        self.execute(instance)

        # NOTE(review): knobs are reset to True unconditionally rather than
        # restored to their previous values — confirm that is intended.
        node["writeGeometries"].setValue(True)
        node["writePointClouds"].setValue(True)
        node["writeAxes"].setValue(True)

        node["file"].setValue(file_path)

        # Validate output
        msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
        assert os.path.exists(instance.data["output_path"]), msg
class ExtractNukeGeometry(Extract):
    """Extract geometry data via the write node.

    Camera, point-cloud and axis export are temporarily switched off so
    the render contains only geometry, then switched back on.
    """

    label = "Geometry"
    families = ["geometry", "local"]

    def process(self, instance):
        node = instance[0]

        # Restrict export to geometry data only for this render.
        node["writeCameras"].setValue(False)
        node["writePointClouds"].setValue(False)
        node["writeAxes"].setValue(False)

        # Redirect the node's output to the instance's path, render,
        # then restore the original path below.
        file_path = node["file"].getValue()
        node["file"].setValue(instance.data["output_path"])

        self.execute(instance)

        # NOTE(review): knobs are reset to True unconditionally rather than
        # restored to their previous values — confirm that is intended.
        node["writeCameras"].setValue(True)
        node["writePointClouds"].setValue(True)
        node["writeAxes"].setValue(True)

        node["file"].setValue(file_path)

        # Validate output
        msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
        assert os.path.exists(instance.data["output_path"]), msg

View file

@ -1,40 +0,0 @@
import pyblish.api
import os
import pype
import shutil
class ExtractScript(pype.api.Extractor):
    """Publish the current Nuke script as a workfile representation."""

    label = 'Extract Script'
    order = pyblish.api.ExtractorOrder - 0.05
    optional = True
    hosts = ['nuke']
    families = ["workfile"]

    def process(self, instance):
        self.log.debug("instance extracting: {}".format(instance.data))
        current_script = instance.context.data["currentFile"]

        # Define extract output file path inside the staging directory.
        staging = self.staging_dir(instance)
        filename = "{0}".format(instance.data["name"])
        destination = os.path.join(staging, filename)

        self.log.info("Performing extraction..")
        shutil.copy(current_script, destination)

        # Register the copied script as an "nk" representation.
        instance.data.setdefault("representations", [])
        instance.data["representations"].append({
            'name': 'nk',
            'ext': '.nk',
            'files': filename,
            "stagingDir": staging,
        })

        self.log.info(
            "Extracted instance '%s' to: %s" % (instance.name, destination))

View file

@ -1,27 +0,0 @@
import pyblish.api
import shutil
import os
class CopyStagingDir(pyblish.api.InstancePlugin):
    """Copy data rendered into temp local directory.

    Moves every file from the instance's staging directory to its
    final output directory, creating the output directory if needed.
    """

    order = pyblish.api.IntegratorOrder - 2
    label = "Copy data from temp dir"
    hosts = ["nuke", "nukeassist"]
    families = ["render.local"]

    def process(self, instance):
        temp_dir = instance.data.get("stagingDir")
        output_dir = instance.data.get("outputDir")

        # copy data to correct dir
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
            self.log.info("output dir has been created")

        for f in os.listdir(temp_dir):
            self.log.info("copy file to correct destination: {}".format(f))
            # os.listdir already yields bare names, so joining directly is
            # correct (the original wrapped f in a redundant basename()).
            shutil.copy(os.path.join(temp_dir, f),
                        os.path.join(output_dir, f))

View file

@ -1,98 +0,0 @@
import re
import os
import json
import subprocess
import pyblish.api
from pype.action import get_errored_plugins_from_data
def _get_script():
"""Get path to the image sequence script"""
# todo: use a more elegant way to get the python script
try:
from pype.fusion.scripts import publish_filesequence
except Exception:
raise RuntimeError("Expected module 'publish_imagesequence'"
"to be available")
module_path = publish_filesequence.__file__
if module_path.endswith(".pyc"):
module_path = module_path[:-len(".pyc")] + ".py"
return module_path
class PublishImageSequence(pyblish.api.InstancePlugin):
    """Publish the generated local image sequences."""

    order = pyblish.api.IntegratorOrder
    label = "Publish Rendered Image Sequence(s)"
    hosts = ["fusion"]
    families = ["saver.renderlocal"]

    def process(self, instance):
        # Skip this plug-in if the ExtractImageSequence failed
        errored_plugins = get_errored_plugins_from_data(instance.context)
        if any(plugin.__name__ == "FusionRenderLocal" for plugin in
               errored_plugins):
            raise RuntimeError("Fusion local render failed, "
                               "publishing images skipped.")

        subset = instance.data["subset"]
        ext = instance.data["ext"]

        # Regex to match resulting renders
        regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset),
                                                  ext=re.escape(ext))

        # The instance has most of the information already stored
        metadata = {
            "regex": regex,
            "frameStart": instance.context.data["frameStart"],
            "frameEnd": instance.context.data["frameEnd"],
            "families": ["imagesequence"],
        }

        # Write metadata and store the path in the instance
        output_directory = instance.data["outputDir"]
        path = os.path.join(output_directory,
                            "{}_metadata.json".format(subset))
        with open(path, "w") as f:
            json.dump(metadata, f)

        assert os.path.isfile(path), ("Stored path is not a file for %s"
                                      % instance.data["name"])

        # Suppress any subprocess console
        # NOTE(review): subprocess.STARTUPINFO exists on Windows only, so
        # this plug-in presumably runs on Windows — confirm before reuse.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        startupinfo.wShowWindow = subprocess.SW_HIDE

        # Run the external publish script and forward its paths argument.
        process = subprocess.Popen(["python", _get_script(),
                                    "--paths", path],
                                   bufsize=1,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   startupinfo=startupinfo)

        # Stream subprocess output into this plug-in's log as it arrives.
        while True:
            output = process.stdout.readline()

            # Break when there is no output or a return code has been given
            # NOTE(review): comparing to '' assumes text-mode pipes
            # (Python 2 semantics); on Python 3 readline yields bytes
            # unless universal_newlines/text is set — verify target runtime.
            if output == '' and process.poll() is not None:
                process.stdout.close()
                break

            if output:
                line = output.strip()
                # Lines prefixed with "ERROR" are escalated to error level.
                if line.startswith("ERROR"):
                    self.log.error(line)
                else:
                    self.log.info(line)

        if process.returncode != 0:
            raise RuntimeError("Process quit with non-zero "
                               "return code: {}".format(process.returncode))

View file

@ -1,24 +0,0 @@
import pyblish.api
import nuke
class ValidateActiveViewer(pyblish.api.ContextPlugin):
    """Validate that an active viewer process exists and has an input."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Active Viewer"
    hosts = ["nuke"]

    def process(self, context):
        # A viewer process must have been collected earlier.
        viewer_process = context.data.get("ViewerProcess")
        assert viewer_process, (
            "Missing active viewer process! Please click on output write node and push key number 1-9"
        )

        # The active viewer must have something connected to it.
        viewer = context.data["ActiveViewer"]
        assert viewer.activeInput() is not None, (
            "Missing active viewer input! Please click on output write node and push key number 1-9"
        )

View file

@ -1,36 +0,0 @@
import os
import pyblish.api
import pype.utils
@pyblish.api.log
class RepairNukeWriteNodeVersionAction(pyblish.api.Action):
    """Reset each failed write node's file path to its expected render path."""

    label = "Repair"
    on = "failed"
    icon = "wrench"

    def process(self, context, plugin):
        import pype.nuke.lib as nukelib

        for failed in pype.utils.filter_instances(context, plugin):
            write_node = failed[0]
            expected = nukelib.get_render_path(write_node)
            self.log.info("render_path: {}".format(expected))
            # Nuke knobs expect forward slashes regardless of platform.
            write_node['file'].setValue(expected.replace("\\", "/"))
class ValidateVersionMatch(pyblish.api.InstancePlugin):
    """Checks if write version matches workfile version"""

    label = "Validate Version Match"
    order = pyblish.api.ValidatorOrder
    actions = [RepairNukeWriteNodeVersionAction]
    hosts = ["nuke"]
    families = ['write']

    def process(self, instance):
        write_version = instance.data['version']
        workfile_version = instance.context.data['version']
        assert write_version == workfile_version, (
            "Version in write doesn't match version of the workfile")

View file

@ -1,59 +0,0 @@
import pyblish.api
import pype.api
import pype.nuke.actions
class RepairWriteFamiliesAction(pyblish.api.Action):
    """Action offered on failed validation: toggle the write's render on."""

    label = "Fix Write's render attributes"
    on = "failed"
    icon = "wrench"

    def process(self, instance, plugin):
        # NOTE(review): pyblish Action.process conventionally receives
        # (context, plugin); here the first argument is treated as an
        # instance and subscripted directly — confirm how this is invoked
        # (compare ValidateWriteFamilies.repair which uses instance[0]).
        self.log.info("instance {}".format(instance))
        instance["render"].setValue(True)
        self.log.info("Rendering toggled ON")
@pyblish.api.log
class ValidateWriteFamilies(pyblish.api.InstancePlugin):
    """ Validates write families. """

    order = pyblish.api.ValidatorOrder
    label = "Valitade writes families"
    hosts = ["nuke"]
    families = ["write"]
    actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction]

    # NOTE(review): declared @staticmethod yet takes `self` explicitly and
    # is invoked as self.get_invalid(self, instance) below — this works,
    # but @classmethod (or dropping `self`) would be conventional.
    @staticmethod
    def get_invalid(self, instance):
        # Only instances flagged with a ".frames" family are checked.
        if not [f for f in instance.data["families"]
                if ".frames" in f]:
            return

        # No collected files means nothing was rendered -> invalid.
        if not instance.data.get('files'):
            return (instance)

    def process(self, instance):
        self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))

        invalid = self.get_invalid(self, instance)
        if invalid:
            raise ValueError(str("`{}`: Switch `Render` on! "
                                 "> {}".format(__name__, invalid)))

        # if any(".frames" in f for f in instance.data["families"]):
        #     if not instance.data["files"]:
        #         raise ValueError("instance {} is set to publish frames\
        #             but no files were collected, render the frames first or\
        #             check 'render' checkbox onthe no to 'ON'".format(instance)))
        #
        #
        # self.log.info("Checked correct writes families")

    @classmethod
    def repair(cls, instance):
        # Invoked by pype.api.RepairAction: force the render knob on.
        cls.log.info("instance {}".format(instance))
        instance[0]["render"].setValue(True)
        cls.log.info("Rendering toggled ON")

View file

@ -4,9 +4,7 @@ import contextlib
from avalon import api, io
from pype.nuke import presets
from pype.api import Logger
log = Logger().get_logger(__name__, "nuke")
from pypeapp import config
@contextlib.contextmanager
@ -34,14 +32,14 @@ def preserve_trim(node):
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start))
log.info("start frame of Read was set to"
"{}".format(script_start))
print("start frame of Read was set to"
"{}".format(script_start))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str((script_start + offset_frame)))
log.info("start frame of Read was set to"
"{}".format(script_start))
print("start frame of Read was set to"
"{}".format(script_start))
def loader_shift(node, frame, relative=True):
@ -70,11 +68,37 @@ def loader_shift(node, frame, relative=True):
return int(script_start)
def add_review_presets_config():
returning = {
"families": list(),
"representations": list()
}
review_presets = config.get_presets()["plugins"]["global"]["publish"].get(
"ExtractReview", {})
outputs = review_presets.get("outputs", {})
#
for output, properities in outputs.items():
returning["representations"].append(output)
returning["families"] += properities.get("families", [])
return returning
class LoadMov(api.Loader):
"""Load mov file into Nuke"""
presets = add_review_presets_config()
families = [
"source",
"plate",
"render",
"review"] + presets["families"]
families = ["write", "source", "plate", "render", "review"]
representations = ["wipmov", "h264", "mov", "preview", "review", "mp4"]
representations = [
"mov",
"preview",
"review",
"mp4"] + presets["representations"]
label = "Load mov"
order = -10
@ -115,7 +139,7 @@ class LoadMov(api.Loader):
if not file:
repr_id = context["representation"]["_id"]
log.warning(
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
return
@ -211,7 +235,7 @@ class LoadMov(api.Loader):
if not file:
repr_id = representation["_id"]
log.warning(
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
return
@ -246,9 +270,10 @@ class LoadMov(api.Loader):
colorspace = version_data.get("colorspace")
if first is None:
log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(node['name'].value(), representation))
self.log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(
node['name'].value(), representation))
first = 0
# fix handle start and end if none are available
@ -264,7 +289,7 @@ class LoadMov(api.Loader):
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file)
log.info("__ node['file']: {}".format(node["file"].value()))
self.log.info("__ node['file']: {}".format(node["file"].value()))
# Set the global in to the start frame of the sequence
loader_shift(node, first, relative=True)
@ -290,7 +315,6 @@ class LoadMov(api.Loader):
if preset_clrsp is not None:
node["colorspace"].setValue(str(preset_clrsp))
updated_dict = {}
updated_dict.update({
"representation": str(representation["_id"]),
@ -316,7 +340,7 @@ class LoadMov(api.Loader):
update_container(
node, updated_dict
)
log.info("udated to version: {}".format(version.get("name")))
self.log.info("udated to version: {}".format(version.get("name")))
def remove(self, container):

View file

@ -5,10 +5,6 @@ import contextlib
from avalon import api, io
from pype.nuke import presets
from pype.api import Logger
log = Logger().get_logger(__name__, "nuke")
@contextlib.contextmanager
def preserve_trim(node):
@ -35,14 +31,14 @@ def preserve_trim(node):
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start))
log.info("start frame of Read was set to"
"{}".format(script_start))
print("start frame of Read was set to"
"{}".format(script_start))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str((script_start + offset_frame)))
log.info("start frame of Read was set to"
"{}".format(script_start))
print("start frame of Read was set to"
"{}".format(script_start))
def loader_shift(node, frame, relative=True):
@ -74,7 +70,7 @@ def loader_shift(node, frame, relative=True):
class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["write", "source", "plate", "render"]
families = ["render2d", "source", "plate", "render"]
representations = ["exr", "dpx", "jpg", "jpeg", "png"]
label = "Load sequence"
@ -91,7 +87,7 @@ class LoadSequence(api.Loader):
version = context['version']
version_data = version.get("data", {})
log.info("version_data: {}\n".format(version_data))
self.log.info("version_data: {}\n".format(version_data))
self.first_frame = int(nuke.root()["first_frame"].getValue())
self.handle_start = version_data.get("handleStart", 0)
@ -111,7 +107,7 @@ class LoadSequence(api.Loader):
if not file:
repr_id = context["representation"]["_id"]
log.warning(
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
return
@ -242,7 +238,7 @@ class LoadSequence(api.Loader):
if not file:
repr_id = representation["_id"]
log.warning(
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
return
@ -277,9 +273,10 @@ class LoadSequence(api.Loader):
last = version_data.get("frameEnd")
if first is None:
log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(node['name'].value(), representation))
self.log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(
node['name'].value(), representation))
first = 0
first -= self.handle_start
@ -288,7 +285,7 @@ class LoadSequence(api.Loader):
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file)
log.info("__ node['file']: {}".format(node["file"].value()))
self.log.info("__ node['file']: {}".format(node["file"].value()))
# Set the global in to the start frame of the sequence
loader_shift(node, first, relative=True)
@ -328,7 +325,7 @@ class LoadSequence(api.Loader):
node,
updated_dict
)
log.info("udated to version: {}".format(version.get("name")))
self.log.info("udated to version: {}".format(version.get("name")))
def remove(self, container):

View file

@ -15,12 +15,6 @@ class ValidateScript(pyblish.api.InstancePlugin):
def process(self, instance):
ctx_data = instance.context.data
asset_name = ctx_data["asset"]
# asset = io.find_one({
# "type": "asset",
# "name": asset_name
# })
asset = lib.get_asset(asset_name)
asset_data = asset["data"]

View file

@ -105,7 +105,6 @@ class CollectClips(api.ContextPlugin):
"asset": asset,
"family": "clip",
"families": [],
"handles": 0,
"handleStart": projectdata.get("handleStart", 0),
"handleEnd": projectdata.get("handleEnd", 0),
"version": int(version)})

View file

@ -11,7 +11,9 @@ class CollectVideoTracksLuts(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.debug("Finding soft effect for subset: `{}`".format(instance.data.get("subset")))
self.log.debug(
"Finding soft effect for subset: `{}`".format(
instance.data.get("subset")))
# taking active sequence
subset = instance.data.get("subset")
@ -41,8 +43,12 @@ class CollectVideoTracksLuts(pyblish.api.InstancePlugin):
if len(instance.data.get("effectTrackItems", {}).keys()) > 0:
instance.data["families"] += ["lut"]
self.log.debug("effects.keys: {}".format(instance.data.get("effectTrackItems", {}).keys()))
self.log.debug("effects: {}".format(instance.data.get("effectTrackItems", {})))
self.log.debug(
"effects.keys: {}".format(
instance.data.get("effectTrackItems", {}).keys()))
self.log.debug(
"effects: {}".format(
instance.data.get("effectTrackItems", {})))
def add_effect(self, instance, track_index, item):
track = item.parentTrack().name()

View file

@ -24,7 +24,6 @@ class CollectClipHandles(api.ContextPlugin):
continue
# get handles
handles = int(instance.data["handles"])
handle_start = int(instance.data["handleStart"])
handle_end = int(instance.data["handleEnd"])
@ -38,19 +37,16 @@ class CollectClipHandles(api.ContextPlugin):
self.log.debug("Adding to shared assets: `{}`".format(
instance.data["name"]))
asset_shared.update({
"handles": handles,
"handleStart": handle_start,
"handleEnd": handle_end
})
for instance in filtered_instances:
if not instance.data.get("main") and not instance.data.get("handleTag"):
self.log.debug("Synchronize handles on: `{}`".format(
instance.data["name"]))
name = instance.data["asset"]
s_asset_data = assets_shared.get(name)
instance.data["handles"] = s_asset_data.get("handles", 0)
instance.data["handleStart"] = s_asset_data.get(
"handleStart", 0
)

View file

@ -263,7 +263,6 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
# get custom attributes of the shot
if instance.data.get("main"):
in_info['custom_attributes'] = {
'handles': int(instance.data.get('handles', 0)),
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": instance.data["frameStart"],

View file

@ -134,7 +134,6 @@ class CollectPlatesData(api.InstancePlugin):
# add to data of representation
version_data.update({
"handles": version_data['handleStart'],
"colorspace": item.sourceMediaColourTransform(),
"colorspaceScript": instance.context.data["colorspace"],
"families": [f for f in families if 'ftrack' not in f],
@ -156,8 +155,9 @@ class CollectPlatesData(api.InstancePlugin):
ext=ext
)
start_frame = source_first_frame + instance.data["sourceInH"]
duration = instance.data["sourceOutH"] - instance.data["sourceInH"]
start_frame = int(source_first_frame + instance.data["sourceInH"])
duration = int(
instance.data["sourceOutH"] - instance.data["sourceInH"])
end_frame = start_frame + duration
self.log.debug("start_frame: `{}`".format(start_frame))
self.log.debug("end_frame: `{}`".format(end_frame))

View file

@ -125,7 +125,7 @@ class CollectReviews(api.InstancePlugin):
thumb_path,
format='png'
)
self.log.debug("__ sourceIn: `{}`".format(instance.data["sourceIn"]))
self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(thumbnail, thumb_frame))
@ -145,7 +145,10 @@ class CollectReviews(api.InstancePlugin):
item = instance.data["item"]
transfer_data = [
"handleStart", "handleEnd", "sourceIn", "sourceOut", "frameStart", "frameEnd", "sourceInH", "sourceOutH", "clipIn", "clipOut", "clipInH", "clipOutH", "asset", "track", "version"
"handleStart", "handleEnd", "sourceIn", "sourceOut",
"frameStart", "frameEnd", "sourceInH", "sourceOutH",
"clipIn", "clipOut", "clipInH", "clipOutH", "asset",
"track", "version"
]
version_data = dict()
@ -154,7 +157,6 @@ class CollectReviews(api.InstancePlugin):
# add to data of representation
version_data.update({
"handles": version_data['handleStart'],
"colorspace": item.sourceMediaColourTransform(),
"families": instance.data["families"],
"subset": instance.data["subset"],

View file

@ -38,7 +38,9 @@ class CollectClipTagHandles(api.ContextPlugin):
# gets arguments if there are any
t_args = t_metadata.get("tag.args", "")
assert t_args, self.log.error("Tag with Handles is missing Args. Use only handle start/end")
assert t_args, self.log.error(
"Tag with Handles is missing Args. "
"Use only handle start/end")
t_args = json.loads(t_args.replace("'", "\""))
# add in start
@ -55,8 +57,8 @@ class CollectClipTagHandles(api.ContextPlugin):
# adding handles to asset_shared on context
if instance.data.get("handleEnd"):
assets_shared_a["handleEnd"] = instance.data["handleEnd"]
assets_shared_a[
"handleEnd"] = instance.data["handleEnd"]
if instance.data.get("handleStart"):
assets_shared_a["handleStart"] = instance.data["handleStart"]
if instance.data.get("handles"):
assets_shared_a["handles"] = instance.data["handles"]
assets_shared_a[
"handleStart"] = instance.data["handleStart"]

View file

@ -6,6 +6,7 @@ import pyblish.api
import tempfile
from avalon import io, api
class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
"""Collect video tracks effects into context."""
@ -17,9 +18,12 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
item = instance.data["item"]
effects = instance.data.get("effectTrackItems")
instance.data["families"] = [f for f in instance.data.get("families", []) if f not in ["lut"]]
instance.data["families"] = [f for f in instance.data.get(
"families", []) if f not in ["lut"]]
self.log.debug("___ instance.data[families]: `{}`".format(instance.data["families"]))
self.log.debug(
"__ instance.data[families]: `{}`".format(
instance.data["families"]))
# skip any without effects
if not effects:
@ -102,7 +106,6 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
# add to data of representation
version_data.update({
"handles": version_data['handleStart'],
"colorspace": item.sourceMediaColourTransform(),
"colorspaceScript": instance.context.data["colorspace"],
"families": ["plate", "lut"],
@ -132,7 +135,7 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
def copy_linked_files(self, effect, dst_dir):
for k, v in effect["node"].items():
if k in "file" and v is not '':
if k in "file" and v != '':
base_name = os.path.basename(v)
dst = os.path.join(dst_dir, base_name).replace("\\", "/")

View file

@ -1,79 +0,0 @@
import pyblish
from avalon import io
from pype.action import get_errored_instances_from_context
import pype.api as pype
@pyblish.api.log
class RepairNukestudioVersionUp(pyblish.api.Action):
    """Bump the workfile version and save the project under the new path."""

    label = "Version Up Workfile"
    on = "failed"
    icon = "wrench"

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context)

        # Apply pyblish logic to get the instances for the plug-in
        matching = pyblish.api.instances_by_plugin(errored_instances, plugin)
        if not matching:
            return

        project = context.data["activeProject"]
        current_path = context.data.get("currentFile")
        versioned_path = pype.version_up(current_path)

        if project:
            project.saveAs(versioned_path)
            self.log.info("Project workfile version was fixed")
class ValidateVersion(pyblish.api.InstancePlugin):
    """Validate clip's versions.

    Fails when the version parsed from the workfile already exists in the
    database for this instance's asset/subset.
    """

    order = pyblish.api.ValidatorOrder
    families = ["plate"]
    label = "Validate Version"
    actions = [RepairNukestudioVersionUp]
    hosts = ["nukestudio"]

    def process(self, instance):
        version = int(instance.data.get("version", 0))
        asset_name = instance.data.get("asset", None)
        subset_name = instance.data.get("subset", None)

        # NOTE(review): the message contains an unformatted `{}`
        # placeholder — it is never .format()-ed.
        assert version, "The file is missing version string! example: filename_v001.hrox `{}`"

        self.log.debug("Collected version: `{0}`".format(version))

        found_v = 0
        try:
            # Walk project -> asset -> subset -> version in the database.
            io.install()
            project = io.find_one({"type": "project"})
            asset = io.find_one({
                "type": "asset",
                "name": asset_name,
                "parent": project["_id"]
            })
            subset = io.find_one({
                "type": "subset",
                "parent": asset["_id"],
                "name": subset_name
            })
            version_db = io.find_one({
                'type': 'version',
                'parent': subset["_id"],
                'name': version
            }) or {}
            found_v = version_db.get("name", 0)
            self.log.debug("Found version: `{0}`".format(found_v))
        except Exception as e:
            # Best-effort lookup: any DB failure leaves found_v at 0 so
            # validation below passes.
            self.log.debug("Problem to get data from database for asset `{0}` subset `{1}`. Error: `{2}`".format(asset_name, subset_name, e))

        # Fail when the database already holds exactly this version.
        assert (found_v != version), "Version must not be the same as in database `{0}`, Versions file: `{1}`, db: `{2}`".format(asset_name, version, found_v)