Merged in 2.0/PYPE-304-nk-removing-frameHashes (pull request #121)

2.0/PYPE-304 nk removing frameHashes

Approved-by: Milan Kolar <milan@orbi.tools>
This commit is contained in:
Jakub Jezek 2019-04-23 15:46:27 +00:00 committed by Milan Kolar
commit 376ec3dedf
18 changed files with 319 additions and 254 deletions

View file

@ -10,10 +10,9 @@ import logging
log = logging.getLogger(__name__)
# # do not delete these are mandatory
# Anatomy = None
# Dataflow = None
# Metadata = None
# Colorspace = None
Anatomy = None
Dataflow = None
Colorspace = None
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

View file

@ -17,12 +17,11 @@ from .action import (
from pypeapp import Logger
# from . import (
# Anatomy,
# Colorspace,
# Metadata,
# Dataflow
# )
from . import (
Anatomy,
Colorspace,
Dataflow
)
from .templates import (
load_data_from_templates,
@ -88,7 +87,6 @@ __all__ = [
# preloaded templates
"Anatomy",
"Colorspace",
"Metadata",
"Dataflow",
# QtWidgets

View file

@ -6,6 +6,7 @@ from pyblish import api as pyblish
from .. import api
from pype.nuke import menu
import logging
from .lib import (
create_write_node
@ -44,40 +45,40 @@ if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
# class NukeHandler(Logger.logging.Handler):
# '''
# Nuke Handler - emits logs into nuke's script editor.
# warning will emit nuke.warning()
# critical and fatal would popup msg dialog to alert of the error.
# '''
#
# def __init__(self):
# api.Logger.logging.Handler.__init__(self)
# self.set_name("Pype_Nuke_Handler")
#
# def emit(self, record):
# # Formated message:
# msg = self.format(record)
#
# if record.levelname.lower() in [
# # "warning",
# "critical",
# "fatal",
# "error"
# ]:
# nuke.message(msg)
class NukeHandler(logging.Handler):
'''
Nuke Handler - emits logs into nuke's script editor.
warning will emit nuke.warning()
critical and fatal would popup msg dialog to alert of the error.
'''
#
# '''Adding Nuke Logging Handler'''
# nuke_handler = NukeHandler()
# if nuke_handler.get_name() \
# not in [handler.get_name()
# for handler in Logger.logging.root.handlers[:]]:
# api.Logger.logging.getLogger().addHandler(nuke_handler)
# api.Logger.logging.getLogger().setLevel(Logger.logging.INFO)
#
# if not self.nLogger:
# self.nLogger = Logger
def __init__(self):
logging.Handler.__init__(self)
self.set_name("Pype_Nuke_Handler")
def emit(self, record):
# Formated message:
msg = self.format(record)
if record.levelname.lower() in [
# "warning",
"critical",
"fatal",
"error"
]:
nuke.message(msg)
'''Adding Nuke Logging Handler'''
nuke_handler = NukeHandler()
if nuke_handler.get_name() \
not in [handler.get_name()
for handler in logging.root.handlers[:]]:
logging.getLogger().addHandler(nuke_handler)
logging.getLogger().setLevel(logging.INFO)
if not self.nLogger:
self.nLogger = Logger
def reload_config():
@ -113,11 +114,11 @@ def install():
# api.set_avalon_workdir()
# reload_config()
import sys
# import sys
for path in sys.path:
if path.startswith("C:\\Users\\Public"):
sys.path.remove(path)
# for path in sys.path:
# if path.startswith("C:\\Users\\Public"):
# sys.path.remove(path)
log.info("Registering Nuke plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
@ -139,7 +140,7 @@ def install():
menu.install()
# load data from templates
# api.load_data_from_templates()
api.load_data_from_templates()
def uninstall():

View file

@ -195,8 +195,8 @@ def set_viewers_colorspace(viewer):
erased_viewers = []
for v in viewers:
v['viewerProcess'].setValue(str(viewer.viewerProcess))
if str(viewer.viewerProcess) not in v['viewerProcess'].value():
v['viewerProcess'].setValue(str(viewer["viewerProcess"]))
if str(viewer["viewerProcess"]) not in v['viewerProcess'].value():
copy_inputs = v.dependencies()
copy_knobs = {k: v[k].value() for k in v.knobs()
if k not in filter_knobs}
@ -218,7 +218,7 @@ def set_viewers_colorspace(viewer):
nv[k].setValue(v)
# set viewerProcess
nv['viewerProcess'].setValue(str(viewer.viewerProcess))
nv['viewerProcess'].setValue(str(viewer["viewerProcess"]))
if erased_viewers:
log.warning(
@ -229,6 +229,16 @@ def set_viewers_colorspace(viewer):
def set_root_colorspace(root_dict):
assert isinstance(root_dict, dict), log.error(
"set_root_colorspace(): argument should be dictionary")
# first set OCIO
if nuke.root()["colorManagement"].value() not in str(root_dict["colorManagement"]):
nuke.root()["colorManagement"].setValue(str(root_dict["colorManagement"]))
# second set ocio version
if nuke.root()["OCIO_config"].value() not in str(root_dict["OCIO_config"]):
nuke.root()["OCIO_config"].setValue(str(root_dict["OCIO_config"]))
# then set the rest
for knob, value in root_dict.items():
if nuke.root()[knob].value() not in value:
nuke.root()[knob].setValue(str(value))
@ -244,20 +254,20 @@ def set_writes_colorspace(write_dict):
def set_colorspace():
from pype import api as pype
nuke_colorspace = getattr(pype.Colorspace, "nuke", None)
nuke_colorspace = pype.Colorspace.get("nuke", None)
try:
set_root_colorspace(nuke_colorspace.root)
set_root_colorspace(nuke_colorspace["root"])
except AttributeError:
log.error(
"set_colorspace(): missing `root` settings in template")
try:
set_viewers_colorspace(nuke_colorspace.viewer)
set_viewers_colorspace(nuke_colorspace["viewer"])
except AttributeError:
log.error(
"set_colorspace(): missing `viewer` settings in template")
try:
set_writes_colorspace(nuke_colorspace.write)
set_writes_colorspace(nuke_colorspace["write"])
except AttributeError:
log.error(
"set_colorspace(): missing `write` settings in template")
@ -440,7 +450,7 @@ def get_additional_data(container):
def get_write_node_template_attr(node):
''' Gets all defined data from presets
'''
# get avalon data from node
data = dict()

View file

@ -26,12 +26,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'render': 'render',
'nukescript': 'comp',
'review': 'mov'}
exclude = []
def process(self, instance):
for ex in self.exclude:
if ex in instance.data['families']:
return
self.log.debug('instance {}'.format(instance))

View file

@ -0,0 +1,58 @@
import sys
import os
import subprocess
from avalon import api
def open(filepath):
    """Launch *filepath* with the platform's default application.

    macOS is detected via ``sys.platform`` before the generic POSIX
    branch, because ``os.name`` is also ``'posix'`` on Darwin.
    """
    if sys.platform.startswith('darwin'):
        subprocess.call(('open', filepath))
        return
    if os.name == 'nt':
        # Windows: delegate to the shell's file association.
        os.startfile(filepath)
    elif os.name == 'posix':
        subprocess.call(('xdg-open', filepath))
class Openfile(api.Loader):
    """Open Image Sequence with system default"""

    families = ["write"]
    representations = ["*"]

    label = "Open"
    order = -10
    icon = "play-circle"
    color = "orange"

    def load(self, context, name, namespace, data):
        from avalon.vendor import clique

        directory = os.path.dirname(self.fname)
        listing = os.listdir(directory)

        representation = context["representation"]
        ext = representation["name"]
        path = representation["data"]["path"]

        if ext in ["#"]:
            # Sequence representation: resolve the first frame on disk.
            collections, remainder = clique.assemble(
                listing,
                patterns=[clique.PATTERNS["frames"]],
                minimum_items=1)
            first_image = list(collections[0])[0]
            filepath = os.path.normpath(os.path.join(directory, first_image))
        else:
            # Single file: first directory entry containing the extension
            # that is not itself a sequence placeholder.
            matching = [entry for entry in listing
                        if ext in entry and "#" not in entry]
            filepath = os.path.normpath(os.path.join(directory, matching[0]))

        self.log.info("Opening : {}".format(filepath))
        open(filepath)

View file

@ -1,49 +0,0 @@
import sys
import os
import subprocess
from avalon import api
def open(filepath):
    """Open *filepath* with the system default executable.

    Checks macOS first (``sys.platform`` starts with ``darwin``) since
    ``os.name`` reports ``'posix'`` there as well.
    """
    is_darwin = sys.platform.startswith('darwin')
    if is_darwin:
        subprocess.call(('open', filepath))
    elif os.name == 'nt':
        os.startfile(filepath)
    elif os.name == 'posix':
        subprocess.call(('xdg-open', filepath))
class PlayImageSequence(api.Loader):
    """Open Image Sequence with system default"""

    families = ["write"]
    representations = ["*"]

    label = "Play sequence"
    order = -10
    icon = "play-circle"
    color = "orange"

    def load(self, context, name, namespace, data):
        from avalon.vendor import clique

        directory = self.fname
        entries = os.listdir(directory)

        # Group the directory listing into frame sequences; anything
        # left over means the folder is not a clean image sequence.
        collections, remainder = clique.assemble(
            entries,
            patterns=[clique.PATTERNS["frames"]],
            minimum_items=1)
        assert not remainder, ("There shouldn't have been a remainder for "
                               "'%s': %s" % (directory, remainder))

        first_image = list(collections[0])[0]
        filepath = os.path.normpath(os.path.join(directory, first_image))

        self.log.info("Opening : {}".format(filepath))
        open(filepath)

View file

@ -195,7 +195,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
template_data["frame"] = src_collection.format(
"{padding}") % i
anatomy_filled = anatomy.format(template_data)
test_dest_files.append(anatomy_filled.render.path)
test_dest_files.append(anatomy_filled["render"]["path"])
dst_collections, remainder = clique.assemble(test_dest_files)
dst_collection = dst_collections[0]
@ -223,7 +223,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
#
template_data.pop("frame", None)
anatomy.pop("frame", None)
fname = files
@ -239,15 +238,21 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.render.path
dst = anatomy_filled["render"]["path"]
instance.data["transfers"].append([src, dst])
template_data["frame"] = "#" * anatomy.render.padding
if ext[1:] not in ["jpeg", "jpg", "mov", "mp4", "wav"]:
template_data["frame"] = "#" * int(anatomy_filled["render"]["padding"])
anatomy_filled = anatomy.format(template_data)
path_to_save = anatomy_filled.render.path
template = anatomy.render.fullpath
self.log.debug('ext[1:]: {}'.format(ext[1:]))
path_to_save = anatomy_filled["render"]["path"]
template = anatomy.templates["render"]["path"]
self.log.debug("path_to_save: {}".format(path_to_save))
representation = {
"schema": "pype:representation-2.0",

View file

@ -35,7 +35,7 @@ class CrateWriteRender(avalon.nuke.Creator):
data = OrderedDict()
data["family"] = self.family
data["family"] = self.family.split("_")[-1]
data["families"] = self.families
{data.update({k: v}) for k, v in self.data.items()
@ -103,48 +103,51 @@ class CrateWritePrerender(avalon.nuke.Creator):
create_write_node(self.data["subset"], write_data)
return
class CrateWriteStill(avalon.nuke.Creator):
    """Creator producing a single-frame (still) Write node setup.

    Builds the instance data in ``__init__`` and, in ``process``,
    creates a FrameHold pinned to the current frame followed by the
    preset-driven Write node.
    """

    # change this to template preset
    preset = "still"

    name = "WriteStill"
    label = "Create Write Still"
    hosts = ["nuke"]
    family = "{}_write".format(preset)
    families = preset
    icon = "image"

    def __init__(self, *args, **kwargs):
        super(CrateWriteStill, self).__init__(*args, **kwargs)

        data = OrderedDict()

        data["family"] = self.family
        data["families"] = self.families

        # Merge remaining creator data without overwriting the keys set
        # above. (Was a side-effecting set comprehension
        # `{data.update({k: v}) for ...}` — an anti-pattern; a plain
        # loop does the same thing explicitly.)
        for k, v in self.data.items():
            if k not in data:
                data[k] = v
        self.data = data

    def process(self):
        """Create the FrameHold + Write node pair for the subset.

        No-op when a node named after the subset already exists.
        """
        self.name = self.data["subset"]
        instance = nuke.toNode(self.data["subset"])

        if not instance:
            write_data = {
                "frame_range": [nuke.frame(), nuke.frame()],
                "class": "write",
                "preset": self.preset,
                "avalon": self.data
            }

            # Hold the current frame so the still renders one image.
            nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
            create_write_node(self.data["subset"], write_data)

        return
#
#
# class CrateWriteStill(avalon.nuke.Creator):
# # change this to template preset
# preset = "still"
#
# name = "WriteStill"
# label = "Create Write Still"
# hosts = ["nuke"]
# family = "{}_write".format(preset)
# families = preset
# icon = "image"
#
# def __init__(self, *args, **kwargs):
# super(CrateWriteStill, self).__init__(*args, **kwargs)
#
# data = OrderedDict()
#
# data["family"] = self.family.split("_")[-1]
# data["families"] = self.families
#
# {data.update({k: v}) for k, v in self.data.items()
# if k not in data.keys()}
# self.data = data
#
# def process(self):
# self.name = self.data["subset"]
#
# node_name = self.data["subset"].replace(
# "_", "_f{}_".format(nuke.frame()))
# instance = nuke.toNode(self.data["subset"])
# self.data["subset"] = node_name
#
# family = self.family
# node = 'write'
#
# if not instance:
# write_data = {
# "frame_range": [nuke.frame(), nuke.frame()],
# "class": node,
# "preset": self.preset,
# "avalon": self.data
# }
#
# nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
# create_write_node(node_name, write_data)
#
# return

View file

@ -8,7 +8,7 @@ import avalon.io as io
import nuke
from pype.api import Logger
log = Logger.get_looger(__name__, "nuke")
log = Logger().get_logger(__name__, "nuke")
@contextlib.contextmanager
@ -88,8 +88,6 @@ class LoadSequence(api.Loader):
containerise,
viewer_update_and_undo_stop
)
# for k, v in context.items():
# log.info("key: `{}`, value: {}\n".format(k, v))
version = context['version']
version_data = version.get("data", {})
@ -137,12 +135,14 @@ class LoadSequence(api.Loader):
data_imprint.update({k: context["version"]['data'][k]})
data_imprint.update({"objectName": read_name})
r["tile_color"].setValue(int("0x4ecd25ff", 16))
return containerise(r,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__,
data=data_imprint)
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__,
data=data_imprint)
def switch(self, container, representation):
self.update(container, representation)
@ -150,18 +150,17 @@ class LoadSequence(api.Loader):
def update(self, container, representation):
"""Update the Loader's path
Fusion automatically tries to reset some variables when changing
Nuke automatically tries to reset some variables when changing
the loader's path to a new file. These automatic changes are to its
inputs:
"""
from avalon.nuke import (
viewer_update_and_undo_stop,
ls_img_sequence,
update_container
)
log.info("this i can see")
node = nuke.toNode(container['objectName'])
# TODO: prepare also for other Read img/geo/camera
assert node.Class() == "Read", "Must be Read"
@ -170,8 +169,19 @@ class LoadSequence(api.Loader):
file = ls_img_sequence(os.path.dirname(root), one=True)
# Get start frame from version data
version = io.find_one({"type": "version",
"_id": representation["parent"]})
version = io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get all versions in list
versions = io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
max_version = max(versions)
start = version["data"].get("startFrame")
if start is None:
log.warning("Missing start frame for updated version"
@ -179,24 +189,44 @@ class LoadSequence(api.Loader):
"{} ({})".format(node['name'].value(), representation))
start = 0
with viewer_update_and_undo_stop():
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file["path"])
log.info("__ node['file']: {}".format(node["file"]))
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file["path"])
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)
if global_in_changed:
# Log this change to the user
log.debug("Changed '{}' global in:"
" {:d}".format(node['name'].value(), start))
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)
if global_in_changed:
# Log this change to the user
log.debug("Changed '{}' global in:"
" {:d}".format(node['name'].value(), start))
updated_dict = {}
updated_dict.update({
"representation": str(representation["_id"]),
"startFrame": start,
"endFrame": version["data"].get("endFrame"),
"version": version.get("name"),
"colorspace": version["data"].get("colorspace"),
"source": version["data"].get("source"),
"handles": version["data"].get("handles"),
"fps": version["data"].get("fps"),
"author": version["data"].get("author"),
"outputDir": version["data"].get("outputDir"),
})
# Update the imprinted representation
update_container(
node,
{"representation": str(representation["_id"])}
)
# change color of node
if version.get("name") not in [max_version]:
node["tile_color"].setValue(int("0xd84f20ff", 16))
else:
node["tile_color"].setValue(int("0x4ecd25ff", 16))
# Update the imprinted representation
update_container(
node,
updated_dict
)
log.info("udated to version: {}".format(version.get("name")))
def remove(self, container):

View file

@ -18,7 +18,7 @@ class CollectInstanceFamilies(pyblish.api.InstancePlugin):
families = []
if instance.data.get('families'):
families.append(instance.data['families'])
families += instance.data['families']
# set for ftrack to accept
# instance.data["families"] = ["ftrack"]
@ -36,10 +36,8 @@ class CollectInstanceFamilies(pyblish.api.InstancePlugin):
families.append('ftrack')
instance.data["families"] = families
# Sort/grouped by family (preserving local index)
instance.context[:] = sorted(instance.context, key=self.sort_by_family)

View file

@ -1,13 +1,9 @@
import os
import tempfile
import nuke
import pyblish.api
import logging
import pype.api as pype
log = logging.get_logger(__name__)
@pyblish.api.log
class CollectNukeWrites(pyblish.api.ContextPlugin):
@ -65,9 +61,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
int(last_frame)
)
# preredered frames
# collect frames by try
# collect families in next file
if "files" not in instance.data:
instance.data["files"] = list()
try:
@ -89,8 +82,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
"colorspace": node["colorspace"].value(),
})
self.log.debug("instance.data: {}".format(instance.data))
self.log.debug("context: {}".format(context))

View file

@ -17,18 +17,11 @@ class NukeRenderLocal(pype.api.Extractor):
order = pyblish.api.ExtractorOrder
label = "Render Local"
hosts = ["nuke"]
families = ["render.local", "prerender.local", "still.local"]
families = ["render.local"]
def process(self, instance):
node = instance[0]
# This should be a ContextPlugin, but this is a workaround
# for a bug in pyblish to run once for a family: issue #250
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True
self.log.debug("instance collected: {}".format(instance.data))
@ -70,8 +63,9 @@ class NukeRenderLocal(pype.api.Extractor):
collections, remainder = clique.assemble(*instance.data['files'])
self.log.info('collections: {}'.format(str(collections)))
collection = collections[0]
instance.data['collection'] = collection
if collections:
collection = collections[0]
instance.data['collection'] = collection
self.log.info('Finished render')
return

View file

@ -28,21 +28,29 @@ class ExtractDataForReview(pype.api.Extractor):
self.log.debug("creating staging dir:")
self.staging_dir(instance)
self.render_review_representation(instance,
representation="mov")
self.log.debug("review mov:")
self.transcode_mov(instance)
self.render_review_representation(instance,
representation="jpeg")
self.log.debug("instance: {}".format(instance))
self.log.debug("instance.data[families]: {}".format(
instance.data["families"]))
if "still" not in instance.data["families"]:
self.render_review_representation(instance,
representation="mov")
self.log.debug("review mov:")
self.transcode_mov(instance)
self.log.debug("instance.data: {}".format(instance.data))
self.render_review_representation(instance,
representation="jpeg")
else:
self.log.debug("instance: {}".format(instance))
self.render_review_representation(instance, representation="jpeg")
# Restore selection
[i["selected"].setValue(False) for i in nuke.allNodes()]
[i["selected"].setValue(True) for i in selection]
def transcode_mov(self, instance):
import subprocess
collection = instance.data["collection"]
staging_dir = instance.data["stagingDir"]
staging_dir = instance.data["stagingDir"].replace("\\", "/")
file_name = collection.format("{head}mov")
review_mov = os.path.join(staging_dir, file_name).replace("\\", "/")
@ -53,13 +61,16 @@ class ExtractDataForReview(pype.api.Extractor):
out, err = (
ffmpeg
.input(input_movie)
.output(review_mov, pix_fmt='yuv420p', crf=18, timecode="00:00:00:01")
.output(
review_mov,
pix_fmt='yuv420p',
crf=18,
timecode="00:00:00:01"
)
.overwrite_output()
.run()
)
self.log.debug("Removing `{0}`...".format(
instance.data["baked_colorspace_movie"]))
os.remove(instance.data["baked_colorspace_movie"])
@ -72,23 +83,32 @@ class ExtractDataForReview(pype.api.Extractor):
assert instance.data['files'], "Instance data files should't be empty!"
import clique
import nuke
temporary_nodes = []
staging_dir = instance.data["stagingDir"]
staging_dir = instance.data["stagingDir"].replace("\\", "/")
self.log.debug("StagingDir `{0}`...".format(staging_dir))
collection = instance.data.get("collection", None)
# Create nodes
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
if collection:
# get path
fname = os.path.basename(collection.format(
"{head}{padding}{tail}"))
fhead = collection.format("{head}")
# get first and last frame
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
else:
fname = os.path.basename(instance.data.get("path", None))
fhead = os.path.splitext(fname)[0] + "."
first_frame = instance.data.get("startFrame", None)
last_frame = instance.data.get("endFrame", None)
node = previous_node = nuke.createNode("Read")
node["file"].setValue(
os.path.join(staging_dir,
os.path.basename(collection.format(
"{head}{padding}{tail}"))).replace("\\", "/"))
os.path.join(staging_dir, fname).replace("\\", "/"))
node["first"].setValue(first_frame)
node["origfirst"].setValue(first_frame)
@ -126,7 +146,7 @@ class ExtractDataForReview(pype.api.Extractor):
write_node = nuke.createNode("Write")
if representation in "mov":
file = collection.format("{head}baked.mov")
file = fhead + "baked.mov"
path = os.path.join(staging_dir, file).replace("\\", "/")
self.log.debug("Path: {}".format(path))
instance.data["baked_colorspace_movie"] = path
@ -137,7 +157,7 @@ class ExtractDataForReview(pype.api.Extractor):
temporary_nodes.append(write_node)
elif representation in "jpeg":
file = collection.format("{head}jpeg")
file = fhead + "jpeg"
path = os.path.join(staging_dir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
@ -147,8 +167,8 @@ class ExtractDataForReview(pype.api.Extractor):
temporary_nodes.append(write_node)
# retime for
first_frame = int(last_frame)/2
last_frame = int(last_frame)/2
first_frame = int(last_frame) / 2
last_frame = int(last_frame) / 2
# add into files for integration as representation
instance.data["files"].append(file)

View file

@ -28,7 +28,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
]
# Value of these attributes can be found on parents
hierarchical_attributes = ["fps"]
hierarchical_attributes = ["fps", "resolution_width", "resolution_height", "pixel_aspect"]
missing_attributes = []
asset_attributes = {}
@ -80,6 +80,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
# Compare asset's values Nukescript X Database
not_matching = []
for attr in attributes:
self.log.debug("asset vs script attribute: {0}, {1}".format(asset_attributes[attr], script_attributes[attr]))
if asset_attributes[attr] != script_attributes[attr]:
not_matching.append(attr)

View file

@ -4,7 +4,7 @@ import sys
from avalon import io, api as avalon, lib as avalonlib
from . import lib
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger
from pypeapp import Logger, config, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
@ -19,7 +19,7 @@ def set_session():
def load_data_from_templates():
"""
Load Templates `contextual` data as singleton object
Load Presets and Anatomy `contextual` data as singleton object
[info](https://en.wikipedia.org/wiki/Singleton_pattern)
Returns:
@ -31,17 +31,29 @@ def load_data_from_templates():
if not any([
api.Dataflow,
api.Anatomy,
api.Colorspace,
api.Metadata
api.Colorspace
]
):
# base = Templates()
t = Templates(type=["anatomy", "metadata", "dataflow", "colorspace"])
api.Anatomy = t.anatomy
api.Metadata = t.metadata.format()
data = {"metadata": api.Metadata}
api.Dataflow = t.dataflow.format(data)
api.Colorspace = t.colorspace
presets = config.get_presets()
anatomy = Anatomy()
try:
# try if it is not in projects custom directory
# `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json`
# init.json define preset names to be used
p_init = presets["init"]
colorspace = presets["colorspace"][p_init["colorspace"]]
dataflow = presets["dataflow"][p_init["dataflow"]]
except KeyError:
log.warning("No projects custom preset available...")
colorspace = presets["colorspace"]["default"]
dataflow = presets["dataflow"]["default"]
log.info("Presets `colorspace` and `dataflow` loaded from `default`...")
api.Anatomy = anatomy
api.Dataflow = dataflow
api.Colorspace = colorspace
log.info("Data from templates were Loaded...")
@ -59,7 +71,6 @@ def reset_data_from_templates():
api.Dataflow = None
api.Anatomy = None
api.Colorspace = None
api.Metadata = None
log.info("Data from templates were Unloaded...")
@ -283,11 +294,12 @@ def get_workdir_template(data=None):
load_data_from_templates()
anatomy = api.Anatomy
anatomy_filled = anatomy.format(data or get_context_data())
try:
work = anatomy.work.format(data or get_context_data())
work = anatomy_filled["work"]
except Exception as e:
log.error("{0} Error in "
"get_workdir_template(): {1}".format(__name__, e))
return os.path.join(work.root, work.folder)
return work["folder"]