Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)
Merge branch 'bugfix/PYPE-428-dazzle-feedback-publish-errors' of bitbucket.org:pypeclub/pype into bugfix/PYPE-428-dazzle-feedback-publish-errors
Commit de342e627b
32 changed files with 619 additions and 376 deletions
@@ -7,11 +7,6 @@ from .lib import filter_pyblish_plugins
 import logging

 log = logging.getLogger(__name__)

-# # do not delete these are mandatory
-Anatomy = None
-Dataflow = None
-Colorspace = None
-
 PACKAGE_DIR = os.path.dirname(__file__)
 PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@@ -26,6 +21,7 @@ def install():
     pyblish.register_discovery_filter(filter_pyblish_plugins)
+    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)


 def uninstall():
     log.info("Deregistering global plug-ins..")
     pyblish.deregister_plugin_path(PUBLISH_PATH)
pype/api.py (18 lines changed)
@@ -18,15 +18,8 @@ from .action import (

 from pypeapp import Logger

-from . import (
-    Anatomy,
-    Colorspace,
-    Dataflow
-)
-
 from .templates import (
     load_data_from_templates,
     reset_data_from_templates,
     get_project_name,
     get_project_code,
     get_hierarchy,
@@ -40,6 +33,7 @@ from .templates import (
 )

 from .lib import (
     version_up,
     get_handle_irregular,
     get_project_data,
     get_asset_data,
@@ -65,11 +59,6 @@ __all__ = [

     "ValidationException",

-    # contectual templates
-    # get data to preloaded templates
-    "load_data_from_templates",
-    "reset_data_from_templates",
-
     # get contextual data
     "get_handle_irregular",
     "get_project_data",
@@ -89,9 +78,4 @@ __all__ = [
     "get_data_hierarchical_attr",
     "get_avalon_project_template",
-
-    # preloaded templates
-    "Anatomy",
-    "Colorspace",
-    "Dataflow",
 ]
@@ -50,9 +50,6 @@ def install():
     avalon.data["familiesStateDefault"] = False
     avalon.data["familiesStateToggled"] = family_states

-    # load data from templates
-    api.load_data_from_templates()
-
     # launch pico server
     pico_server_launch()
@@ -1,10 +1,12 @@
 from pype import api as pype
+from pypeapp import Anatomy, config


 log = pype.Logger().get_logger(__name__, "aport")


 def get_anatomy(**kwarg):
-    return pype.Anatomy
+    return Anatomy()


 def get_dataflow(**kwarg):
@@ -15,7 +17,8 @@ def get_dataflow(**kwarg):
     assert any([host, cls]), log.error("aport.templates.get_dataflow():"
                                        "Missing mandatory kwargs `host`, `cls`")

-    aport_dataflow = getattr(pype.Dataflow, str(host), None)
+    presets = config.get_init_presets()
+    aport_dataflow = getattr(presets["dataflow"], str(host), None)
     aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
     if preset:
         aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
@@ -32,7 +35,8 @@ def get_colorspace(**kwarg):
     assert any([host, cls]), log.error("aport.templates.get_colorspace():"
                                        "Missing mandatory kwargs `host`, `cls`")

-    aport_colorspace = getattr(pype.Colorspace, str(host), None)
+    presets = config.get_init_presets()
+    aport_colorspace = getattr(presets["colorspace"], str(host), None)
     aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
     if preset:
         aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)
@@ -132,9 +132,6 @@ def install():

     menu.install()

-    # load data from templates
-    api.load_data_from_templates()
-
     # Workfiles.
     launch_workfiles = os.environ.get("WORKFILES_STARTUP")

@@ -156,9 +153,6 @@ def uninstall():

     pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)

-    # reset data from templates
-    api.reset_data_from_templates()
-

 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle node passthrough states on instance toggles."""
@@ -7,8 +7,9 @@ import avalon.nuke
 import pype.api as pype
 import nuke
 from .templates import (
-    get_dataflow,
-    get_colorspace
+    get_colorspace_preset,
+    get_node_dataflow_preset,
+    get_node_colorspace_preset
 )

 from pypeapp import Logger
@@ -121,8 +122,8 @@ def get_render_path(node):
         "preset": data['avalon']['families']
     }

-    nuke_dataflow_writes = get_dataflow(**data_preset)
-    nuke_colorspace_writes = get_colorspace(**data_preset)
+    nuke_dataflow_writes = get_node_dataflow_preset(**data_preset)
+    nuke_colorspace_writes = get_node_colorspace_preset(**data_preset)

     application = lib.get_application(os.environ["AVALON_APP_NAME"])
     data.update({
@@ -172,8 +173,8 @@ def script_name():


 def create_write_node(name, data):
-    nuke_dataflow_writes = get_dataflow(**data)
-    nuke_colorspace_writes = get_colorspace(**data)
+    nuke_dataflow_writes = get_node_dataflow_preset(**data)
+    nuke_colorspace_writes = get_node_colorspace_preset(**data)
     application = lib.get_application(os.environ["AVALON_APP_NAME"])

     try:
@@ -311,9 +312,8 @@ def set_writes_colorspace(write_dict):


 def set_colorspace():
-    from pype import api as pype
-
-    nuke_colorspace = pype.Colorspace.get("nuke", None)
+    nuke_colorspace = get_colorspace_preset().get("nuke", None)

     try:
         set_root_colorspace(nuke_colorspace["root"])
@@ -637,8 +637,8 @@ def get_write_node_template_attr(node):
     }

     # get template data
-    nuke_dataflow_writes = get_dataflow(**data_preset)
-    nuke_colorspace_writes = get_colorspace(**data_preset)
+    nuke_dataflow_writes = get_node_dataflow_preset(**data_preset)
+    nuke_colorspace_writes = get_node_colorspace_preset(**data_preset)

     # collecting correct data
     correct_data = OrderedDict({
@@ -1,21 +1,33 @@
 from pype import api as pype
+from pypeapp import Anatomy, config


 log = pype.Logger().get_logger(__name__, "nuke")


 def get_anatomy(**kwarg):
-    return pype.Anatomy
+    return Anatomy()


-def get_dataflow(**kwarg):
+def get_dataflow_preset():
+    presets = config.get_init_presets()
+    return presets["dataflow"]
+
+
+def get_colorspace_preset():
+    presets = config.get_init_presets()
+    return presets["colorspace"]
+
+
+def get_node_dataflow_preset(**kwarg):
     log.info(kwarg)
     host = kwarg.get("host", "nuke")
     cls = kwarg.get("class", None)
     preset = kwarg.get("preset", None)
-    assert any([host, cls]), log.error("nuke.templates.get_dataflow():"
-                                       "Missing mandatory kwargs `host`, `cls`")
+    assert any([host, cls]), log.error("nuke.templates.get_node_dataflow_preset(): \
+        Missing mandatory kwargs `host`, `cls`")

-    nuke_dataflow = pype.Dataflow.get(str(host), None)
+    nuke_dataflow = get_dataflow_preset().get(str(host), None)
     nuke_dataflow_nodes = nuke_dataflow.get('nodes', None)
     nuke_dataflow_node = nuke_dataflow_nodes.get(str(cls), None)

@@ -26,15 +38,15 @@ def get_dataflow(**kwarg):
     return nuke_dataflow_node


-def get_colorspace(**kwarg):
+def get_node_colorspace_preset(**kwarg):
     log.info(kwarg)
     host = kwarg.get("host", "nuke")
     cls = kwarg.get("class", None)
     preset = kwarg.get("preset", None)
-    assert any([host, cls]), log.error("nuke.templates.get_colorspace():"
-                                       "Missing mandatory kwargs `host`, `cls`")
+    assert any([host, cls]), log.error("nuke.templates.get_node_colorspace_preset(): \
+        Missing mandatory kwargs `host`, `cls`")

-    nuke_colorspace = pype.Colorspace.get(str(host), None)
+    nuke_colorspace = get_colorspace_preset().get(str(host), None)
     nuke_colorspace_node = nuke_colorspace.get(str(cls), None)
     if preset:
         nuke_colorspace_node = nuke_colorspace_node.get(str(preset), None)
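Note: with this refactor the nuke templates module stops reading the pype.Dataflow/pype.Colorspace singletons and fetches presets on demand through config.get_init_presets(). A minimal, self-contained sketch of the new lookup chain, assuming the presets resolve to plain nested dicts; the "Write" preset values below are invented, not taken from the diff:

    presets = {
        "dataflow": {
            "nuke": {"nodes": {"Write": {"file_type": "exr"}}}
        }
    }

    def get_node_dataflow_preset(host="nuke", cls=None, preset=None):
        # host -> "nodes" -> node class -> optional named preset
        dataflow = presets["dataflow"].get(str(host), None)
        nodes = dataflow.get("nodes", None) if dataflow else None
        node = nodes.get(str(cls), None) if nodes else None
        if preset and node:
            node = node.get(str(preset), None)
        return node

    print(get_node_dataflow_preset(cls="Write"))  # {'file_type': 'exr'}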
@@ -55,9 +55,6 @@ def install(config):

     menu_install()

-    # load data from templates
-    api.load_data_from_templates()
-
     # Workfiles.
     launch_workfiles = os.environ.get("WORKFILES_STARTUP")

@@ -95,9 +92,6 @@ def uninstall():
     avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
     avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)

-    # reset data from templates
-    api.reset_data_from_templates()
-

 def _register_events():
     avalon.on("taskChanged", _update_menu_task_label)
@@ -18,6 +18,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):

         ftrack_log = logging.getLogger('ftrack_api')
         ftrack_log.setLevel(logging.WARNING)
+        ftrack_log = logging.getLogger('ftrack_api_old')
+        ftrack_log.setLevel(logging.WARNING)
+
         # Collect session
         session = ftrack_api.Session()
         context.data["ftrackSession"] = session
@@ -49,14 +49,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
         for comp in instance.data['representations']:
             self.log.debug('component {}'.format(comp))

-            if comp.get('thumbnail'):
+            if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])):
                 location = self.get_ftrack_location(
                     'ftrack.server', ft_session
                 )
                 component_data = {
                     "name": "thumbnail"  # Default component name is "main".
                 }
-            elif comp.get('preview'):
+            elif comp.get('preview') or ("preview" in comp.get('tags', [])):
                 '''
                 Ftrack bug requirement:
                 - Start frame must be 0
@@ -120,7 +122,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):

             componentList.append(component_item)
             # Create copy with ftrack.unmanaged location if thumb or prev
-            if comp.get('thumbnail') or comp.get('preview'):
+            if comp.get('thumbnail') or comp.get('preview') \
+                    or ("preview" in comp.get('tags', [])) \
+                    or ("thumbnail" in comp.get('tags', [])):
                 unmanaged_loc = self.get_ftrack_location(
                     'ftrack.unmanaged', ft_session
                 )
@@ -148,7 +150,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):

             componentList.append(component_item_src)

-
         self.log.debug('componentsList: {}'.format(str(componentList)))
         instance.data["ftrackComponentsList"] = componentList
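Note: the hunks above let a representation opt into thumbnail/preview handling through its tags list as well as the legacy boolean keys. A self-contained sketch of the widened check (the sample dicts are invented):

    # Either the old boolean key or a tag now marks a representation.
    def is_thumbnail(comp):
        return bool(comp.get('thumbnail')) or "thumbnail" in comp.get('tags', [])

    def is_preview(comp):
        return bool(comp.get('preview')) or "preview" in comp.get('tags', [])

    legacy = {"thumbnail": True}
    tagged = {"tags": ["thumbnail", "review"]}
    assert is_thumbnail(legacy) and is_thumbnail(tagged)
    assert not is_preview(legacy) and not is_preview(tagged)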
@@ -1,92 +0,0 @@
-# import os
-# import pyblish.api
-# import subprocess
-# from pype.vendor import clique
-# from pypeapp import config
-#
-#
-# class ExtractReview(pyblish.api.InstancePlugin):
-#     """Resolve any dependency issies
-#
-#     This plug-in resolves any paths which, if not updated might break
-#     the published file.
-#
-#     The order of families is important, when working with lookdev you want to
-#     first publish the texture, update the texture paths in the nodes and then
-#     publish the shading network. Same goes for file dependent assets.
-#     """
-#
-#     label = "Extract Review"
-#     order = pyblish.api.ExtractorOrder
-#     # families = ["imagesequence", "render", "write", "source"]
-#     # hosts = ["shell"]
-#
-#     def process(self, instance):
-#         # adding plugin attributes from presets
-#         publish_presets = config.get_presets()["plugins"]["global"]["publish"]
-#         plugin_attrs = publish_presets[self.__class__.__name__]
-#
-#
-#         fps = instance.data.get("fps")
-#         start = instance.data.get("startFrame")
-#         stagingdir = os.path.normpath(instance.data.get("stagingDir"))
-#
-#         collected_frames = os.listdir(stagingdir)
-#         collections, remainder = clique.assemble(collected_frames)
-#
-#         full_input_path = os.path.join(
-#             stagingdir, collections[0].format('{head}{padding}{tail}')
-#         )
-#         self.log.info("input {}".format(full_input_path))
-#
-#         filename = collections[0].format('{head}')
-#         if not filename.endswith('.'):
-#             filename += "."
-#         movFile = filename + "mov"
-#         full_output_path = os.path.join(stagingdir, movFile)
-#
-#         self.log.info("output {}".format(full_output_path))
-#
-#         config_data = instance.context.data['output_repre_config']
-#
-#         proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-#         profile = config_data.get(proj_name, config_data['__default__'])
-#
-#         input_args = []
-#         # overrides output file
-#         input_args.append("-y")
-#         # preset's input data
-#         input_args.extend(profile.get('input', []))
-#         # necessary input data
-#         input_args.append("-start_number {}".format(start))
-#         input_args.append("-i {}".format(full_input_path))
-#         input_args.append("-framerate {}".format(fps))
-#
-#         output_args = []
-#         # preset's output data
-#         output_args.extend(profile.get('output', []))
-#         # output filename
-#         output_args.append(full_output_path)
-#         mov_args = [
-#             "ffmpeg",
-#             " ".join(input_args),
-#             " ".join(output_args)
-#         ]
-#         subprocess_mov = " ".join(mov_args)
-#         sub_proc = subprocess.Popen(subprocess_mov)
-#         sub_proc.wait()
-#
-#         if not os.path.isfile(full_output_path):
-#             raise("Quicktime wasn't created succesfully")
-#
-#         if "representations" not in instance.data:
-#             instance.data["representations"] = []
-#
-#         representation = {
-#             'name': 'mov',
-#             'ext': 'mov',
-#             'files': movFile,
-#             "stagingDir": stagingdir,
-#             "preview": True
-#         }
-#         instance.data["representations"].append(representation)
@@ -12,6 +12,5 @@ class CollectTemplates(pyblish.api.ContextPlugin):
     label = "Collect Templates"

     def process(self, context):
-        # pype.load_data_from_templates()
         context.data['anatomy'] = Anatomy()
         self.log.info("Anatomy templates collected...")
@@ -2,6 +2,7 @@ import os
 import subprocess
 import pype.api
 import json
+import pyblish


 class ExtractBurnin(pype.api.Extractor):
@@ -14,7 +15,8 @@ class ExtractBurnin(pype.api.Extractor):
     """

     label = "Quicktime with burnins"
-    families = ["burnin"]
+    order = pyblish.api.ExtractorOrder + 0.03
+    families = ["review", "burnin"]
     optional = True

     def process(self, instance):
@@ -29,25 +31,30 @@ class ExtractBurnin(pype.api.Extractor):
             "start_frame": int(instance.data['startFrame']),
             "version": "v" + str(instance.context.data['version'])
         }
+        self.log.debug("__ burnin_data1: {}".format(burnin_data))

-        for repre in instance.data["representations"]:
-            if (not repre.get("burnin", False) or
-                    "burnin" not in repre.get("tags", [])):
+        for i, repre in enumerate(instance.data["representations"]):
+            self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
+            if "burnin" not in repre.get("tags", []):
                 continue

-            stagingdir = self.staging_dir(instance)
+            stagingdir = repre["stagingDir"]
             filename = "{0}".format(repre["files"])

-            movieFileBurnin = filename + "Burn" + ".mov"
+            name = "_burnin"
+            movieFileBurnin = filename.replace(".mov", "") + name + ".mov"

             full_movie_path = os.path.join(stagingdir, repre["files"])
             full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
+            self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))

             burnin_data = {
                 "input": full_movie_path.replace("\\", "/"),
                 "output": full_burnin_path.replace("\\", "/"),
                 "burnin_data": burnin_data
             }
+            self.log.debug("__ burnin_data2: {}".format(burnin_data))

             json_data = json.dumps(burnin_data)
             scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
@@ -55,9 +62,22 @@ class ExtractBurnin(pype.api.Extractor):
                                       "scripts",
                                       "otio_burnin.py")
+            self.log.debug("Burnin scriptpath: {}".format(scriptpath))

-            p = subprocess.Popen(
-                ['python', scriptpath, json_data]
-            )
-            p.wait()
-
-            repre['files']: movieFileBurnin
+            try:
+                p = subprocess.Popen(
+                    [os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data]
+                )
+                p.wait()
+                if not os.path.isfile(full_burnin_path):
+                    self.log.error(
+                        "Burnin file wasn't created succesfully")
+            except Exception as e:
+                raise RuntimeError("Burnin script didn't work: `{}`".format(e))
+
+            if os.path.exists(full_burnin_path):
+                repre_update = {
+                    "files": movieFileBurnin,
+                    "name": repre["name"] + name
+                }
+                instance.data["representations"][i].update(repre_update)
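Note: ExtractBurnin serializes a single JSON argument for the otio_burnin.py helper script. A sketch of the payload implied by the keys assembled above; the paths and values are invented for the example:

    import json

    payload = {
        "input": "C:/staging/shot010_review.mov",
        "output": "C:/staging/shot010_review_burnin.mov",
        "burnin_data": {                 # context fields baked into the frames
            "username": "artist",
            "start_frame": 1001,
            "version": "v012"
        }
    }
    json_data = json.dumps(payload)      # handed to the script as one argv item
    # subprocess.Popen([os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data])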
pype/plugins/global/publish/extract_review.py (new file, 156 lines)
@@ -0,0 +1,156 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+from pypeapp import config
+
+
+class ExtractReview(pyblish.api.InstancePlugin):
+    """Extracting Review mov file for Ftrack
+
+    Compulsory attribute of representation is tags list with "review",
+    otherwise the representation is ignored.
+
+    All new represetnations are created and encoded by ffmpeg following
+    presets found in `pype-config/presets/plugins/global/publish.json:ExtractReview:outputs`. To change the file extension
+    filter values use preset's attributes `ext_filter`
+    """
+
+    label = "Extract Review"
+    order = pyblish.api.ExtractorOrder + 0.02
+    families = ["review"]
+
+    def process(self, instance):
+        # adding plugin attributes from presets
+        publish_presets = config.get_presets()["plugins"]["global"]["publish"]
+        plugin_attrs = publish_presets[self.__class__.__name__]
+        output_profiles = plugin_attrs.get("outputs", {})
+
+        inst_data = instance.data
+        fps = inst_data.get("fps")
+        start_frame = inst_data.get("startFrame")
+
+        self.log.debug("Families In: `{}`".format(instance.data["families"]))
+
+        # get representation and loop them
+        representations = instance.data["representations"]
+
+        # filter out mov and img sequences
+        representations_new = list()
+        for repre in representations:
+            if repre['ext'] in plugin_attrs["ext_filter"]:
+                tags = repre.get("tags", [])
+
+                self.log.info("Try repre: {}".format(repre))
+
+                if "review" in tags:
+
+                    staging_dir = repre["stagingDir"]
+
+                    for name, profile in output_profiles.items():
+                        ext = profile.get("ext", None)
+                        if not ext:
+                            ext = "mov"
+                            self.log.warning("`ext` attribute not in output profile. Setting to default ext: `mov`")
+
+                        if "mov" not in repre['ext']:
+                            # get output presets and loop them
+                            collections, remainder = clique.assemble(
+                                repre["files"])
+
+                            full_input_path = os.path.join(
+                                staging_dir, collections[0].format(
+                                    '{head}{padding}{tail}')
+                            )
+
+                            filename = collections[0].format('{head}')
+                            if filename.endswith('.'):
+                                filename = filename[:-1]
+                        else:
+                            full_input_path = os.path.join(
+                                staging_dir, repre["files"])
+                            filename = repre["files"].split(".")[0]
+
+                        repr_file = filename + "_{0}.{1}".format(name, ext)
+
+                        full_output_path = os.path.join(staging_dir, repr_file)
+
+                        self.log.info("input {}".format(full_input_path))
+                        self.log.info("output {}".format(full_output_path))
+
+                        repre_new = repre.copy()
+
+                        self.log.debug("Profile name: {}".format(name))
+
+                        new_tags = tags[:]
+                        p_tags = profile.get('tags', [])
+                        self.log.info("p_tags: `{}`".format(p_tags))
+                        # add families
+                        [instance.data["families"].append(t) for t in p_tags
+                         if t not in instance.data["families"]]
+                        # add to
+                        [new_tags.append(t) for t in p_tags
+                         if t not in new_tags]
+
+                        self.log.info("new_tags: `{}`".format(new_tags))
+
+                        input_args = []
+
+                        # overrides output file
+                        input_args.append("-y")
+
+                        # preset's input data
+                        input_args.extend(profile.get('input', []))
+
+                        # necessary input data
+                        # adds start arg only if image sequence
+                        if "mov" not in repre_new['ext']:
+                            input_args.append("-start_number {}".format(
+                                start_frame))
+
+                        input_args.append("-i {}".format(full_input_path))
+                        input_args.append("-framerate {}".format(fps))
+
+                        output_args = []
+                        # preset's output data
+                        output_args.extend(profile.get('output', []))
+
+                        # output filename
+                        output_args.append(full_output_path)
+                        mov_args = [
+                            "ffmpeg",
+                            " ".join(input_args),
+                            " ".join(output_args)
+                        ]
+                        subprocess_mov = " ".join(mov_args)
+
+                        # run subprocess
+                        sub_proc = subprocess.Popen(subprocess_mov)
+                        sub_proc.wait()
+
+                        if not os.path.isfile(full_output_path):
+                            self.log.error(
+                                "Quicktime wasn't created succesfully")
+
+                        # create representation data
+                        repre_new.update({
+                            'name': name,
+                            'ext': ext,
+                            'files': repr_file,
+                            "tags": new_tags,
+                            "outputName": name
+                        })
+                        repre_new.pop("preview")
+                        repre_new.pop("thumbnail")
+
+                        # adding representation
+                        representations_new.append(repre_new)
+                else:
+                    representations_new.append(repre)
+            else:
+                representations_new.append(repre)
+
+        self.log.debug(
+            "new representations: {}".format(representations_new))
+        instance.data["representations"] = representations_new
+
+        self.log.debug("Families Out: `{}`".format(instance.data["families"]))
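Note: the new ExtractReview builds one extra representation per output profile, named <filename>_<profile>.<ext>, and merges the profile's tags into both the representation and the instance families. The preset shape below is a plausible reading of the lookups in the plugin (`ext_filter`, `outputs`, per-profile `ext`/`input`/`output`/`tags`); all concrete values are invented, not taken from pype-config:

    # Hypothetical ExtractReview preset, mirroring the keys the plugin reads.
    extract_review_preset = {
        "ext_filter": ["exr", "dpx", "mov"],   # representations to consider
        "outputs": {
            "h264": {                          # profile name -> output suffix
                "ext": "mov",
                "input": ["-probesize 50M"],   # extra ffmpeg input args
                "output": ["-c:v libx264", "-crf 18", "-pix_fmt yuv420p"],
                "tags": ["review", "ftrack"]
            }
        }
    }

One design observation: the input and output arguments are joined into a single "ffmpeg ..." command string before subprocess.Popen, so quoting of paths that contain spaces is left to the preset author and the staging-dir layout.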
@@ -343,6 +343,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

         template_data["representation"] = repre['ext']

+        if repre.get("outputName"):
+            template_data["output"] = repre['outputName']
+
         src = os.path.join(stagingdir, fname)
         anatomy_filled = anatomy.format(template_data)
         dst = os.path.normpath(
@@ -44,10 +44,7 @@ class PremierePro(api.Action):
         env = acre.merge(env, current_env=dict(os.environ))

         if not env.get('AVALON_WORKDIR', None):
-            pype.load_data_from_templates()
-            os.environ["AVALON_WORKDIR"] = pype.get_workdir_template(
-                pype.Anatomy)
-            pype.reset_data_from_templates()
+            os.environ["AVALON_WORKDIR"] = pype.get_workdir_template()

         env.update(dict(os.environ))
@@ -29,12 +29,23 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):
                               namespace=namespace,
                               sharedReferenceFile=False,
                               groupReference=True,
-                              groupName="{}:{}".format(namespace, name),
+                              groupName=groupName,
                               reference=True,
                               returnNewNodes=True)

+        nodes.pop(0)
+        roots = set()
+        for node in nodes:
+            try:
+                roots.add(cmds.ls(node, long=True)[0].split('|')[2])
+            except:
+                pass
+        cmds.parent(roots, world=True)
+        cmds.makeIdentity(groupName, apply=False, rotate=True,
+                          translate=True, scale=True)
+        cmds.parent(roots, groupName)
+
+        nodes.append(groupName)
+
         presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
         colors = presets['plugins']['maya']['load']['colors']
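Note: the inserted block collects the reference roots by taking index 2 of each node's long DAG name split on "|". A pure-Python illustration of why index 2 is the node directly under the loader's group (the node names are invented):

    # Long Maya DAG names start with "|", so split("|") yields an empty
    # string at index 0 and the top group at index 1.
    long_name = "|modelMain_01:modelMain|modelMain_01:geo|modelMain_01:body"
    parts = long_name.split('|')
    # parts == ['', 'modelMain_01:modelMain', 'modelMain_01:geo', ...]
    print(parts[2])  # modelMain_01:geo -- first child under the group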
@@ -9,7 +9,7 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
     """Load the model"""

     families = ["model"]
-    representations = ["ma"]
+    representations = ["ma", "abc"]
     tool_names = ["loader"]

     label = "Reference Model"
@@ -25,15 +25,30 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
         with maya.maintained_selection():

             groupName = "{}:{}".format(namespace, name)
             cmds.loadPlugin("AbcImport.mll", quiet=True)
             nodes = cmds.file(self.fname,
                               namespace=namespace,
-                              reference=True,
-                              returnNewNodes=True,
                               sharedReferenceFile=False,
                               groupReference=True,
-                              groupName=groupName)
+                              groupName="{}:{}".format(namespace, name),
+                              reference=True,
+                              returnNewNodes=True)
+
+            namespace = cmds.referenceQuery(nodes[0], namespace=True)
+
+            nodes.pop(1)
+            roots = set()
+            for node in nodes:
+                try:
+                    roots.add(cmds.ls(node, long=True)[0].split('|')[2])
+                except:
+                    pass
+            cmds.parent(roots, world=True)
+            cmds.makeIdentity(groupName, apply=False, rotate=True,
+                              translate=True, scale=True)
+            cmds.parent(roots, groupName)
+
+            nodes.append(groupName)

             presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
             colors = presets['plugins']['maya']['load']['colors']
@@ -43,9 +58,9 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
             cmds.setAttr(groupName + ".outlinerColor",
                          c[0], c[1], c[2])

-        self[:] = nodes
+            self[:] = nodes

-        return nodes
+            return nodes

     def switch(self, container, representation):
         self.update(container, representation)
@@ -150,49 +165,59 @@ class GpuCacheLoader(api.Loader):
             pass


-class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
-    """Specific loader of Alembic for the studio.animation family"""
-
-    families = ["model"]
-    representations = ["abc"]
-    tool_names = ["loader"]
-
-    label = "Reference Model"
-    order = -10
-    icon = "code-fork"
-    color = "orange"
-
-    def process_reference(self, context, name, namespace, data):
-
-        import maya.cmds as cmds
-
-        groupName = "{}:{}".format(namespace, name)
-        cmds.loadPlugin("AbcImport.mll", quiet=True)
-        nodes = cmds.file(self.fname,
-                          namespace=namespace,
-                          sharedReferenceFile=False,
-                          groupReference=True,
-                          groupName=groupName,
-                          reference=True,
-                          returnNewNodes=True)
-
-        namespace = cmds.referenceQuery(nodes[0], namespace=True)
-        groupName = "{}:{}".format(namespace, name)
-
-        cmds.makeIdentity(groupName, apply=False, rotate=True,
-                          translate=True, scale=True)
-
-        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
-        colors = presets['plugins']['maya']['load']['colors']
-        c = colors.get('model')
-        if c is not None:
-            cmds.setAttr(groupName + ".useOutlinerColor", 1)
-            cmds.setAttr(groupName + ".outlinerColor",
-                         c[0], c[1], c[2])
-
-        self[:] = nodes
-
-        return nodes
-
-    def switch(self, container, representation):
-        self.update(container, representation)
+# class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
+#     """Specific loader of Alembic for the studio.animation family"""
+#
+#     families = ["model"]
+#     representations = ["abc"]
+#     tool_names = ["loader"]
+#
+#     label = "Reference Model"
+#     order = -10
+#     icon = "code-fork"
+#     color = "orange"
+#
+#     def process_reference(self, context, name, namespace, data):
+#
+#         import maya.cmds as cmds
+#
+#         groupName = "{}:{}".format(namespace, name)
+#         cmds.loadPlugin("AbcImport.mll", quiet=True)
+#         nodes = cmds.file(self.fname,
+#                           namespace=namespace,
+#                           sharedReferenceFile=False,
+#                           groupReference=True,
+#                           groupName="{}:{}".format(namespace, name),
+#                           reference=True,
+#                           returnNewNodes=True)
+#
+#         namespace = cmds.referenceQuery(nodes[0], namespace=True)
+#
+#         nodes.pop(0)
+#         roots = set()
+#         for node in nodes:
+#             try:
+#                 roots.add(cmds.ls(node, long=True)[0].split('|')[2])
+#             except:
+#                 pass
+#         cmds.parent(roots, world=True)
+#         cmds.makeIdentity(groupName, apply=False, rotate=True,
+#                           translate=True, scale=True)
+#         cmds.parent(roots, groupName)
+#
+#         nodes.append(groupName)
+#
+#         presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
+#         colors = presets['plugins']['maya']['load']['colors']
+#         c = colors.get('model')
+#         if c is not None:
+#             cmds.setAttr(groupName + ".useOutlinerColor", 1)
+#             cmds.setAttr(groupName + ".outlinerColor",
+#                          c[0], c[1], c[2])
+#
+#         self[:] = nodes
+#
+#         return nodes
+#
+#     def switch(self, container, representation):
+#         self.update(container, representation)
pype/plugins/maya/publish/validate_attributes.py (new file, 95 lines)
@@ -0,0 +1,95 @@
+import pymel.core as pm
+
+import pyblish.api
+import pype.api
+
+
+class ValidateAttributes(pyblish.api.ContextPlugin):
+    """Ensure attributes are consistent.
+
+    Attributes to validate and their values comes from the
+    "maya/attributes.json" preset, which needs this structure:
+        {
+          "family": {
+            "node_name.attribute_name": attribute_value
+          }
+        }
+    """
+
+    order = pype.api.ValidateContentsOrder
+    label = "Attributes"
+    hosts = ["maya"]
+    actions = [pype.api.RepairContextAction]
+
+    def process(self, context):
+        # Check for preset existence.
+        if not context.data["presets"]["maya"].get("attributes"):
+            return
+
+        invalid = self.get_invalid(context, compute=True)
+        if invalid:
+            raise RuntimeError(
+                "Found attributes with invalid values: {}".format(invalid)
+            )
+
+    @classmethod
+    def get_invalid(cls, context, compute=False):
+        invalid = context.data.get("invalid_attributes", [])
+        if compute:
+            invalid = cls.get_invalid_attributes(context)
+
+        return invalid
+
+    @classmethod
+    def get_invalid_attributes(cls, context):
+        presets = context.data["presets"]["maya"]["attributes"]
+        invalid_attributes = []
+        for instance in context:
+            # Filter publisable instances.
+            if not instance.data["publish"]:
+                continue
+
+            # Filter families.
+            families = [instance.data["family"]]
+            families += instance.data.get("families", [])
+            families = list(set(families) & set(presets.keys()))
+            if not families:
+                continue
+
+            # Get all attributes to validate.
+            attributes = {}
+            for family in families:
+                for preset in presets[family]:
+                    [node_name, attribute_name] = preset.split(".")
+                    attributes.update(
+                        {node_name: {attribute_name: presets[family][preset]}}
+                    )
+
+            # Get invalid attributes.
+            nodes = [pm.PyNode(x) for x in instance]
+            for node in nodes:
+                name = node.name(stripNamespace=True)
+                if name not in attributes.keys():
+                    continue
+
+                presets_to_validate = attributes[name]
+                for attribute in node.listAttr():
+                    if attribute.attrName() in presets_to_validate:
+                        expected = presets_to_validate[attribute.attrName()]
+                        if attribute.get() != expected:
+                            invalid_attributes.append(
+                                {
+                                    "attribute": attribute,
+                                    "expected": expected,
+                                    "current": attribute.get()
+                                }
+                            )
+
+        context.data["invalid_attributes"] = invalid_attributes
+        return invalid_attributes
+
+    @classmethod
+    def repair(cls, instance):
+        invalid = cls.get_invalid(instance)
+        for data in invalid:
+            data["attribute"].set(data["expected"])
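Note: a hypothetical "maya/attributes.json" preset following the structure documented in the plugin's docstring; the family, node, attribute, and values below are invented:

    # family -> "node_name.attribute_name" -> expected value
    attributes_preset = {
        "model": {
            "modelMain.castsShadows": True,
            "modelMain.visibility": True
        }
    }
    # ValidateAttributes splits each key on "." into node name and attribute
    # name, then compares attribute.get() against the stored expected value.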
pype/plugins/nuke/_publish_unused/collect_render_target.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+import pyblish.api
+
+
+@pyblish.api.log
+class CollectRenderTarget(pyblish.api.InstancePlugin):
+    """Collect families for all instances"""
+
+    order = pyblish.api.CollectorOrder + 0.2
+    label = "Collect Render Target"
+    hosts = ["nuke", "nukeassist"]
+    families = ['write']
+
+    def process(self, instance):
+
+        node = instance[0]
+
+        self.log.info('processing {}'.format(node))
+
+        families = []
+        if instance.data.get('families'):
+            families += instance.data['families']
+
+        # set for ftrack to accept
+        # instance.data["families"] = ["ftrack"]
+
+        if node["render"].value():
+            # dealing with local/farm rendering
+            if node["render_farm"].value():
+                families.append("render.farm")
+            else:
+                families.append("render.local")
+        else:
+            families.append("render.frames")
+            # to ignore staging dir op in integrate
+            instance.data['transfer'] = False
+
+        families.append('ftrack')
+
+        instance.data["families"] = families
+
+        # Sort/grouped by family (preserving local index)
+        instance.context[:] = sorted(instance.context, key=self.sort_by_family)
+
+    def sort_by_family(self, instance):
+        """Sort by family"""
+        return instance.data.get("families", instance.data.get("family"))
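Note: the collector assigns exactly one render family per write node based on its "render" and "render_farm" knobs. A compact, runnable sketch mirroring that branch:

    # Decision table mirrored from CollectRenderTarget.process above.
    def render_family(render, render_farm):
        if render:
            return "render.farm" if render_farm else "render.local"
        # frames already on disk; integrate skips the staging-dir transfer
        return "render.frames"

    assert render_family(True, True) == "render.farm"
    assert render_family(True, False) == "render.local"
    assert render_family(False, False) == "render.frames"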
@@ -27,8 +27,8 @@ class ExtractScript(pype.api.Extractor):
         shutil.copy(current_script, path)

-        if "representations" not in instance.data:
-            instance.data["representations"] = []
+        instance.data["representations"] = list()

         representation = {
             'name': 'nk',
             'ext': '.nk',
@@ -68,7 +68,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             "avalonKnob": avalon_knob_data,
             "publish": node.knob('publish').value(),
             "step": 1,
-            "fps": int(nuke.root()['fps'].value())
+            "fps": nuke.root()['fps'].value()

         })
@@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder + 0.1
     label = "Collect Writes"
     hosts = ["nuke", "nukeassist"]
-    families = ["render.local", "render", "render.farm"]
+    families = ["render", "render.local", "render.farm"]

     def process(self, instance):

@@ -96,5 +96,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "colorspace": node["colorspace"].value(),
         })

-
         self.log.debug("instance.data: {}".format(instance.data))
@@ -21,7 +21,6 @@ class NukeRenderLocal(pype.api.Extractor):

     def process(self, instance):
         node = instance[0]
-        context = instance.context

         self.log.debug("instance collected: {}".format(instance.data))
@@ -2,10 +2,9 @@ import os
 import nuke
 import pyblish.api
 import pype
-from pype.vendor import ffmpeg


-class ExtractDataForReview(pype.api.Extractor):
+class ExtractReviewData(pype.api.Extractor):
     """Extracts movie and thumbnail with baked in luts

     must be run after extract_render_local.py
@@ -13,8 +12,7 @@ class ExtractDataForReview(pype.api.Extractor):
     """

     order = pyblish.api.ExtractorOrder + 0.01
-    label = "Extract Review"
-    optional = True
+    label = "Extract Review Data"

     families = ["review"]
     hosts = ["nuke"]
@@ -35,63 +33,15 @@ class ExtractDataForReview(pype.api.Extractor):
         if "still" not in instance.data["families"]:
             self.render_review_representation(instance,
                                               representation="mov")
-            self.log.debug("review mov:")
-            self.transcode_mov(instance)
-            self.log.debug("instance.data: {}".format(instance.data))
             self.render_review_representation(instance,
                                               representation="jpeg")
         else:
-            self.log.debug("instance: {}".format(instance))
             self.render_review_representation(instance, representation="jpeg")

         # Restore selection
         [i["selected"].setValue(False) for i in nuke.allNodes()]
         [i["selected"].setValue(True) for i in selection]

-    def transcode_mov(self, instance):
-        collection = instance.data["collection"]
-        stagingDir = instance.data["stagingDir"].replace("\\", "/")
-        file_name = collection.format("{head}mov")
-
-        review_mov = os.path.join(stagingDir, file_name).replace("\\", "/")
-
-        self.log.info("transcoding review mov: {0}".format(review_mov))
-        if instance.data.get("baked_colorspace_movie"):
-            input_movie = instance.data["baked_colorspace_movie"]
-            out, err = (
-                ffmpeg
-                .input(input_movie)
-                .output(
-                    review_mov,
-                    pix_fmt='yuv420p',
-                    crf=18,
-                    timecode="00:00:00:01"
-                )
-                .overwrite_output()
-                .run()
-            )
-
-            self.log.debug("Removing `{0}`...".format(
-                instance.data["baked_colorspace_movie"]))
-            os.remove(instance.data["baked_colorspace_movie"])
-
-        if "representations" not in instance.data:
-            instance.data["representations"] = []
-
-        representation = {
-            'name': 'review',
-            'ext': 'mov',
-            'files': file_name,
-            "stagingDir": stagingDir,
-            "anatomy_template": "render",
-            "thumbnail": False,
-            "preview": True,
-            'startFrameReview': instance.data['startFrame'],
-            'endFrameReview': instance.data['endFrame'],
-            'frameRate': instance.context.data["framerate"]
-        }
-        instance.data["representations"].append(representation)
-
     def render_review_representation(self,
                                      instance,
                                      representation="mov"):
@@ -172,6 +122,7 @@ class ExtractDataForReview(pype.api.Extractor):
             temporary_nodes.append(write_node)
             thumbnail = False
             preview = True
+            tags = ["review"]

         elif representation in "jpeg":
             file = fhead + "jpeg"
@@ -184,29 +135,31 @@ class ExtractDataForReview(pype.api.Extractor):
             temporary_nodes.append(write_node)
             thumbnail = True
             preview = False
+            tags = ["thumbnail"]

             # retime for
             first_frame = int(last_frame) / 2
             last_frame = int(last_frame) / 2
+
         # add into files for integration as representation
         if "representations" not in instance.data:
             instance.data["representations"] = []

-        repre = {
-            'name': representation,
-            'ext': representation,
-            'files': file,
-            "stagingDir": stagingDir,
-            "anatomy_template": "render",
-            "thumbnail": thumbnail,
-            "preview": preview
-        }
-        instance.data["representations"].append(repre)
+        repre = {
+            'name': representation,
+            'ext': representation,
+            'files': file,
+            "stagingDir": stagingDir,
+            "startFrame": first_frame,
+            "endFrame": last_frame,
+            "anatomy_template": "render",
+            "thumbnail": thumbnail,
+            "preview": preview,
+            "tags": tags
+        }
+        instance.data["representations"].append(repre)

         # Render frames
         nuke.execute(write_node.name(), int(first_frame), int(last_frame))

+        self.log.debug("representations: {}".format(instance.data["representations"]))

         # Clean up
         for node in temporary_nodes:
             nuke.delete(node)
@@ -1,5 +1,4 @@
 import pyblish.api
-import pype.api as pype


 class CollectCurrentFile(pyblish.api.ContextPlugin):
     """Inject the current working file into context"""
@@ -8,10 +7,7 @@ class CollectCurrentFile(pyblish.api.ContextPlugin):

     def process(self, context):
         """Todo, inject the current working file"""

         project = context.data('activeProject')
         context.data["currentFile"] = path = project.path()
-        context.data["version"] = pype.get_version_from_path(path)
         self.log.info("currentFile: {}".format(context.data["currentFile"]))
-        self.log.info("version: {}".format(context.data["version"]))
pype/plugins/nukestudio/publish/collect_workfile_version.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+import pyblish.api
+import pype.api as pype
+
+
+class CollectWorkfileVersion(pyblish.api.ContextPlugin):
+    """Inject the current working file version into context"""
+
+    order = pyblish.api.CollectorOrder - 0.1
+    label = "Collect workfile version"
+
+    def process(self, context):
+
+        project = context.data('activeProject')
+        path = project.path()
+        context.data["version"] = pype.get_version_from_path(path)
+        self.log.info("version: {}".format(context.data["version"]))
pype/plugins/nukestudio/publish/validate_version.py (new file, 74 lines)
@@ -0,0 +1,74 @@
+import pyblish
+from avalon import io
+from pype.action import get_errored_instances_from_context
+import pype.api as pype
+
+
+@pyblish.api.log
+class RepairNukestudioVersionUp(pyblish.api.Action):
+    label = "Version Up Workfile"
+    on = "failed"
+    icon = "wrench"
+
+    def process(self, context, plugin):
+
+        errored_instances = get_errored_instances_from_context(context)
+
+        # Apply pyblish logic to get the instances for the plug-in
+        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+
+        if instances:
+            project = context.data["activeProject"]
+            path = context.data.get("currentFile")
+
+            new_path = pype.version_up(path)
+
+            if project:
+                project.saveAs(new_path)
+
+                self.log.info("Project workfile version was fixed")
+
+
+class ValidateVersion(pyblish.api.InstancePlugin):
+    """Validate clip's versions.
+
+    """
+
+    order = pyblish.api.ValidatorOrder
+    families = ["plate"]
+    label = "Validate Version"
+    actions = [RepairNukestudioVersionUp]
+    hosts = ["nukestudio"]
+
+    def process(self, instance):
+        version = int(instance.data.get("version", 0))
+        asset_name = instance.data.get("asset", None)
+        subset_name = instance.data.get("subset", None)
+
+        assert version, "The file is missing version string! example: filename_v001.hrox `{}`"
+
+        self.log.debug("Collected version: `{0}`".format(version))
+
+        found_v = 0
+        try:
+            io.install()
+            project = io.find_one({"type": "project"})
+
+            asset = io.find_one({"type": "asset",
+                                 "name": asset_name,
+                                 "parent": project["_id"]})
+
+            subset = io.find_one({"type": "subset",
+                                  "parent": asset["_id"],
+                                  "name": subset_name})
+
+            version_db = io.find_one({
+                'type': 'version',
+                'parent': subset["_id"],
+                'name': version
+            }) or {}
+            found_v = version_db.get("name", 0)
+            self.log.debug("Found version: `{0}`".format(found_v))
+        except Exception as e:
+            self.log.debug("Problem to get data from database for asset `{0}` subset `{1}`. Error: `{2}`".format(asset_name, subset_name, e))
+
+        assert (found_v != version), "Version must not be the same as in database `{0}`, Versions file: `{1}`, db: `{2}`".format(asset_name, version, found_v)
pype/plugins/nukestudio/publish/version_up_workfile.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+from pyblish import api
+import pype.api as pype
+
+
+class VersionUpWorkfile(api.ContextPlugin):
+    """Save as new workfile version"""
+
+    order = api.IntegratorOrder + 10.1
+    label = "Version-up Workfile"
+    hosts = ["nukestudio"]
+
+    optional = True
+    active = True
+
+    def process(self, context):
+        project = context.data["activeProject"]
+        path = context.data.get("currentFile")
+        new_path = pype.version_up(path)
+
+        if project:
+            project.saveAs(new_path)
+
+            self.log.info("Project workfile was versioned up")
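Note: both the RepairNukestudioVersionUp action and this integrator rely on pype.version_up. Assuming it bumps the trailing v-token in the same [\._]v(\d+) form that get_version_from_path parses elsewhere in this commit (an assumption about the helper, not the actual pype implementation), a sketch:

    import re

    def version_up(path):
        # Bump "_v###" / ".v###" while preserving zero padding (assumed).
        def bump(match):
            digits = match.group(2)
            return match.group(1) + str(int(digits) + 1).zfill(len(digits))
        return re.sub(r"([\._]v)(\d+)", bump, path, count=1)

    print(version_up("edit_v001.hrox"))  # edit_v002.hrox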
@@ -96,9 +96,6 @@ def install():
     avalon.data["familiesStateDefault"] = False
     avalon.data["familiesStateToggled"] = family_states

-    # load data from templates
-    api.load_data_from_templates()
-
     # synchronize extensions
     extensions_sync()
     message(title="pyblish_paths", message=str(reg_paths), level="info")
@@ -109,6 +106,3 @@ def uninstall():
     pyblish.deregister_plugin_path(PUBLISH_PATH)
     avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
     avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
-
-    # reset data from templates
-    api.reset_data_from_templates()
@@ -1,10 +1,12 @@
 from pype import api as pype
+from pypeapp import Anatomy, config


 log = pype.Logger().get_logger(__name__, "premiere")


 def get_anatomy(**kwarg):
-    return pype.Anatomy
+    return Anatomy()


 def get_dataflow(**kwarg):
@@ -15,7 +17,8 @@ def get_dataflow(**kwarg):
     assert any([host, cls]), log.error("premiera.templates.get_dataflow():"
                                        "Missing mandatory kwargs `host`, `cls`")

-    pr_dataflow = getattr(pype.Dataflow, str(host), None)
+    presets = config.get_init_presets()
+    pr_dataflow = getattr(presets["dataflow"], str(host), None)
     pr_dataflow_node = getattr(pr_dataflow.nodes, str(cls), None)
     if preset:
         pr_dataflow_node = getattr(pr_dataflow_node, str(preset), None)
@@ -32,7 +35,8 @@ def get_colorspace(**kwarg):
     assert any([host, cls]), log.error("premiera.templates.get_colorspace():"
                                        "Missing mandatory kwargs `host`, `cls`")

-    pr_colorspace = getattr(pype.Colorspace, str(host), None)
+    presets = config.get_init_presets()
+    pr_colorspace = getattr(presets["colorspace"], str(host), None)
     pr_colorspace_node = getattr(pr_colorspace, str(cls), None)
     if preset:
         pr_colorspace_node = getattr(pr_colorspace_node, str(preset), None)
@@ -4,7 +4,7 @@ import sys
 from avalon import io, api as avalon, lib as avalonlib
 from . import lib
 # from pypeapp.api import (Templates, Logger, format)
-from pypeapp import Logger, config, Anatomy
+from pypeapp import Logger, Anatomy
 log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))


@@ -17,63 +17,6 @@ def set_session():
     self.SESSION = avalon.session


-def load_data_from_templates():
-    """
-    Load Presets and Anatomy `contextual` data as singleton object
-    [info](https://en.wikipedia.org/wiki/Singleton_pattern)
-
-    Returns:
-        singleton: adding data to sharable object variable
-
-    """
-
-    from . import api
-    if not any([
-        api.Dataflow,
-        api.Anatomy,
-        api.Colorspace
-    ]
-    ):
-        presets = config.get_presets()
-        anatomy = Anatomy()
-
-        try:
-            # try if it is not in projects custom directory
-            # `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json`
-            # init.json define preset names to be used
-            p_init = presets["init"]
-            colorspace = presets["colorspace"][p_init["colorspace"]]
-            dataflow = presets["dataflow"][p_init["dataflow"]]
-        except KeyError:
-            log.warning("No projects custom preset available...")
-            colorspace = presets["colorspace"]["default"]
-            dataflow = presets["dataflow"]["default"]
-            log.info("Presets `colorspace` and `dataflow` loaded from `default`...")
-
-        api.Anatomy = anatomy
-        api.Dataflow = dataflow
-        api.Colorspace = colorspace
-
-        log.info("Data from templates were Loaded...")
-
-
-def reset_data_from_templates():
-    """
-    Clear Templates `contextual` data from singleton
-    object variable
-
-    Returns:
-        singleton: clearing data to None
-
-    """
-
-    from . import api
-    api.Dataflow = None
-    api.Anatomy = None
-    api.Colorspace = None
-    log.info("Data from templates were Unloaded...")
-
-
 def get_version_from_path(file):
     """
     Finds version number in file path string
@@ -85,7 +28,7 @@ def get_version_from_path(file):
         v: version number in string ('001')

     """
-    pattern = re.compile(r"[\.\_]v([0-9]*)")
+    pattern = re.compile(r"[\._]v([0-9]*)")
     try:
         v = pattern.findall(file)[0]
         return v
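Note: the character-class change from [\.\_] to [\._] only drops a redundant escape of the underscore; the matched strings are identical. A quick check:

    import re

    pattern = re.compile(r"[\._]v([0-9]*)")  # matches ".v###" or "_v###"
    print(pattern.findall("shot010_main_v003.nk"))  # ['003']
    print(pattern.findall("comp.v012.mov"))         # ['012']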
@@ -265,7 +208,9 @@ def set_avalon_workdir(project=None,
     if self.SESSION is None:
         set_session()

-    awd = self.SESSION.get("AVALON_WORKDIR", None) or os.getenv("AVALON_WORKDIR", None)
+    awd = self.SESSION.get("AVALON_WORKDIR", None) or \
+        os.getenv("AVALON_WORKDIR", None)

     data = get_context_data(project, hierarchy, asset, task)

     if (not awd) or ("{" not in awd):
@@ -280,7 +225,7 @@

 def get_workdir_template(data=None):
     """
-    Obtain workdir templated path from api.Anatomy singleton
+    Obtain workdir templated path from Anatomy()

     Args:
         data (dict, optional): basic contextual data
@@ -288,12 +233,8 @@
     Returns:
         string: template path
     """
-    from . import api
-
-    """ Installs singleton data """
-    load_data_from_templates()
-
-    anatomy = api.Anatomy
+    anatomy = Anatomy()
     anatomy_filled = anatomy.format(data or get_context_data())

     try: