Merge branch 'develop' into bugfix/PYPE-428-dazzle-feedback-publish-errors

commit 559101217d
Author: jezschaj
Date:   2019-07-17 11:37:55 +02:00
19 changed files with 326 additions and 203 deletions

View file

@ -7,11 +7,6 @@ from .lib import filter_pyblish_plugins
import logging
log = logging.getLogger(__name__)
# # do not delete these are mandatory
Anatomy = None
Dataflow = None
Colorspace = None
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@ -26,6 +21,7 @@ def install():
pyblish.register_discovery_filter(filter_pyblish_plugins)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
def uninstall():
log.info("Deregistering global plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)

View file

@ -18,15 +18,8 @@ from .action import (
from pypeapp import Logger
from . import (
Anatomy,
Colorspace,
Dataflow
)
from .templates import (
load_data_from_templates,
reset_data_from_templates,
get_project_name,
get_project_code,
get_hierarchy,
@ -40,6 +33,7 @@ from .templates import (
)
from .lib import (
version_up,
get_handle_irregular,
get_project_data,
get_asset_data,
@ -65,11 +59,6 @@ __all__ = [
"ValidationException",
# contextual templates
# get data to preloaded templates
"load_data_from_templates",
"reset_data_from_templates",
# get contextual data
"get_handle_irregular",
"get_project_data",
@ -89,9 +78,4 @@ __all__ = [
"get_data_hierarchical_attr",
"get_avalon_project_template",
# preloaded templates
"Anatomy",
"Colorspace",
"Dataflow",
]

View file

@ -50,9 +50,6 @@ def install():
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# launch pico server
pico_server_launch()

View file

@ -1,10 +1,12 @@
from pype import api as pype
from pypeapp import Anatomy, config
log = pype.Logger().get_logger(__name__, "aport")
def get_anatomy(**kwarg):
return pype.Anatomy
return Anatomy()
def get_dataflow(**kwarg):
@ -15,7 +17,8 @@ def get_dataflow(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
aport_dataflow = getattr(pype.Dataflow, str(host), None)
presets = config.get_init_presets()
aport_dataflow = getattr(presets["dataflow"], str(host), None)
aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
if preset:
aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
@ -32,7 +35,8 @@ def get_colorspace(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
aport_colorspace = getattr(pype.Colorspace, str(host), None)
presets = config.get_init_presets()
aport_colorspace = getattr(presets["colorspace"], str(host), None)
aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
if preset:
aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)

View file

@ -132,9 +132,6 @@ def install():
menu.install()
# load data from templates
api.load_data_from_templates()
# Workfiles.
launch_workfiles = os.environ.get("WORKFILES_STARTUP")
@ -156,9 +153,6 @@ def uninstall():
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
# reset data from templates
api.reset_data_from_templates()
def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node passthrough states on instance toggles."""

View file

@ -7,8 +7,9 @@ import avalon.nuke
import pype.api as pype
import nuke
from .templates import (
get_dataflow,
get_colorspace
get_colorspace_preset,
get_node_dataflow_preset,
get_node_colorspace_preset
)
from pypeapp import Logger
@ -121,8 +122,8 @@ def get_render_path(node):
"preset": data['avalon']['families']
}
nuke_dataflow_writes = get_dataflow(**data_preset)
nuke_colorspace_writes = get_colorspace(**data_preset)
nuke_dataflow_writes = get_node_dataflow_preset(**data_preset)
nuke_colorspace_writes = get_node_colorspace_preset(**data_preset)
application = lib.get_application(os.environ["AVALON_APP_NAME"])
data.update({
@ -172,8 +173,8 @@ def script_name():
def create_write_node(name, data):
nuke_dataflow_writes = get_dataflow(**data)
nuke_colorspace_writes = get_colorspace(**data)
nuke_dataflow_writes = get_node_dataflow_preset(**data)
nuke_colorspace_writes = get_node_colorspace_preset(**data)
application = lib.get_application(os.environ["AVALON_APP_NAME"])
try:
@ -311,9 +312,8 @@ def set_writes_colorspace(write_dict):
def set_colorspace():
from pype import api as pype
nuke_colorspace = pype.Colorspace.get("nuke", None)
nuke_colorspace = get_colorspace_preset().get("nuke", None)
try:
set_root_colorspace(nuke_colorspace["root"])
@ -637,8 +637,8 @@ def get_write_node_template_attr(node):
}
# get template data
nuke_dataflow_writes = get_dataflow(**data_preset)
nuke_colorspace_writes = get_colorspace(**data_preset)
nuke_dataflow_writes = get_node_dataflow_preset(**data_preset)
nuke_colorspace_writes = get_node_colorspace_preset(**data_preset)
# collecting correct data
correct_data = OrderedDict({

View file

@ -1,21 +1,33 @@
from pype import api as pype
from pypeapp import Anatomy, config
log = pype.Logger().get_logger(__name__, "nuke")
def get_anatomy(**kwarg):
return pype.Anatomy
return Anatomy()
def get_dataflow(**kwarg):
def get_dataflow_preset():
presets = config.get_init_presets()
return presets["dataflow"]
def get_colorspace_preset():
presets = config.get_init_presets()
return presets["colorspace"]
def get_node_dataflow_preset(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "nuke")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("nuke.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
assert any([host, cls]), log.error("nuke.templates.get_node_dataflow_preset(): \
Missing mandatory kwargs `host`, `cls`")
nuke_dataflow = pype.Dataflow.get(str(host), None)
nuke_dataflow = get_dataflow_preset().get(str(host), None)
nuke_dataflow_nodes = nuke_dataflow.get('nodes', None)
nuke_dataflow_node = nuke_dataflow_nodes.get(str(cls), None)
@ -26,15 +38,15 @@ def get_dataflow(**kwarg):
return nuke_dataflow_node
def get_colorspace(**kwarg):
def get_node_colorspace_preset(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "nuke")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("nuke.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
assert any([host, cls]), log.error("nuke.templates.get_node_colorspace_preset(): \
Missing mandatory kwargs `host`, `cls`")
nuke_colorspace = pype.Colorspace.get(str(host), None)
nuke_colorspace = get_colorspace_preset().get(str(host), None)
nuke_colorspace_node = nuke_colorspace.get(str(cls), None)
if preset:
nuke_colorspace_node = nuke_colorspace_node.get(str(preset), None)
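For orientation, a minimal sketch of how the renamed getters above would be called from the write-node helpers; the import path and the "write"/"render" values are assumptions for illustration, not taken from this commit:

# Hypothetical call site mirroring create_write_node() in the nuke lib changes above.
from pype.nuke.templates import (   # assumed module path
    get_node_dataflow_preset,
    get_node_colorspace_preset
)

data_preset = {
    "class": "write",    # node class looked up in the dataflow preset
    "preset": "render"   # preset branch, e.g. an instance family
}
nuke_dataflow_write = get_node_dataflow_preset(host="nuke", **data_preset)
nuke_colorspace_write = get_node_colorspace_preset(host="nuke", **data_preset)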

View file

@ -55,9 +55,6 @@ def install(config):
menu_install()
# load data from templates
api.load_data_from_templates()
# Workfiles.
launch_workfiles = os.environ.get("WORKFILES_STARTUP")
@ -95,9 +92,6 @@ def uninstall():
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()
def _register_events():
avalon.on("taskChanged", _update_menu_task_label)

View file

@ -12,6 +12,5 @@ class CollectTemplates(pyblish.api.ContextPlugin):
label = "Collect Templates"
def process(self, context):
# pype.load_data_from_templates()
context.data['anatomy'] = Anatomy()
self.log.info("Anatomy templates collected...")

View file

@ -44,10 +44,7 @@ class PremierePro(api.Action):
env = acre.merge(env, current_env=dict(os.environ))
if not env.get('AVALON_WORKDIR', None):
pype.load_data_from_templates()
os.environ["AVALON_WORKDIR"] = pype.get_workdir_template(
pype.Anatomy)
pype.reset_data_from_templates()
os.environ["AVALON_WORKDIR"] = pype.get_workdir_template()
env.update(dict(os.environ))

View file

@ -9,7 +9,7 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
families = ["model"]
representations = ["ma"]
representations = ["ma", "abc"]
tool_names = ["loader"]
label = "Reference Model"
@ -25,14 +25,18 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName=groupName,
groupName="{}:{}".format(namespace, name),
reference=True,
returnNewNodes=True)
nodes.pop(0)
namespace = cmds.referenceQuery(nodes[0], namespace=True)
nodes.pop(1)
roots = set()
for node in nodes:
try:
@ -54,9 +58,9 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
self[:] = nodes
return nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)
@ -161,59 +165,59 @@ class GpuCacheLoader(api.Loader):
pass
class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader of Alembic for the studio.animation family"""
families = ["model"]
representations = ["abc"]
tool_names = ["loader"]
label = "Reference Model"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName="{}:{}".format(namespace, name),
reference=True,
returnNewNodes=True)
namespace = cmds.referenceQuery(nodes[0], namespace=True)
nodes.pop(0)
roots = set()
for node in nodes:
try:
roots.add(cmds.ls(node, long=True)[0].split('|')[2])
except:
pass
cmds.parent(roots, world=True)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
cmds.parent(roots, groupName)
nodes.append(groupName)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get('model')
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return roots
def switch(self, container, representation):
self.update(container, representation)
# class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
# """Specific loader of Alembic for the studio.animation family"""
#
# families = ["model"]
# representations = ["abc"]
# tool_names = ["loader"]
#
# label = "Reference Model"
# order = -10
# icon = "code-fork"
# color = "orange"
#
# def process_reference(self, context, name, namespace, data):
#
# import maya.cmds as cmds
#
# groupName = "{}:{}".format(namespace, name)
# cmds.loadPlugin("AbcImport.mll", quiet=True)
# nodes = cmds.file(self.fname,
# namespace=namespace,
# sharedReferenceFile=False,
# groupReference=True,
# groupName="{}:{}".format(namespace, name),
# reference=True,
# returnNewNodes=True)
#
# namespace = cmds.referenceQuery(nodes[0], namespace=True)
#
# nodes.pop(0)
# roots = set()
# for node in nodes:
# try:
# roots.add(cmds.ls(node, long=True)[0].split('|')[2])
# except:
# pass
# cmds.parent(roots, world=True)
# cmds.makeIdentity(groupName, apply=False, rotate=True,
# translate=True, scale=True)
# cmds.parent(roots, groupName)
#
# nodes.append(groupName)
#
# presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
# colors = presets['plugins']['maya']['load']['colors']
# c = colors.get('model')
# if c is not None:
# cmds.setAttr(groupName + ".useOutlinerColor", 1)
# cmds.setAttr(groupName + ".outlinerColor",
# c[0], c[1], c[2])
#
# self[:] = nodes
#
# return nodes
#
# def switch(self, container, representation):
# self.update(container, representation)

View file

@ -0,0 +1,95 @@
import pymel.core as pm
import pyblish.api
import pype.api
class ValidateAttributes(pyblish.api.ContextPlugin):
"""Ensure attributes are consistent.
Attributes to validate and their values come from the
"maya/attributes.json" preset, which needs this structure:
{
"family": {
"node_name.attribute_name": attribute_value
}
}
"""
order = pype.api.ValidateContentsOrder
label = "Attributes"
hosts = ["maya"]
actions = [pype.api.RepairContextAction]
def process(self, context):
# Check for preset existence.
if not context.data["presets"]["maya"].get("attributes"):
return
invalid = self.get_invalid(context, compute=True)
if invalid:
raise RuntimeError(
"Found attributes with invalid values: {}".format(invalid)
)
@classmethod
def get_invalid(cls, context, compute=False):
invalid = context.data.get("invalid_attributes", [])
if compute:
invalid = cls.get_invalid_attributes(context)
return invalid
@classmethod
def get_invalid_attributes(cls, context):
presets = context.data["presets"]["maya"]["attributes"]
invalid_attributes = []
for instance in context:
# Filter publishable instances.
if not instance.data["publish"]:
continue
# Filter families.
families = [instance.data["family"]]
families += instance.data.get("families", [])
families = list(set(families) & set(presets.keys()))
if not families:
continue
# Get all attributes to validate.
attributes = {}
for family in families:
for preset in presets[family]:
[node_name, attribute_name] = preset.split(".")
attributes.update(
{node_name: {attribute_name: presets[family][preset]}}
)
# Get invalid attributes.
nodes = [pm.PyNode(x) for x in instance]
for node in nodes:
name = node.name(stripNamespace=True)
if name not in attributes.keys():
continue
presets_to_validate = attributes[name]
for attribute in node.listAttr():
if attribute.attrName() in presets_to_validate:
expected = presets_to_validate[attribute.attrName()]
if attribute.get() != expected:
invalid_attributes.append(
{
"attribute": attribute,
"expected": expected,
"current": attribute.get()
}
)
context.data["invalid_attributes"] = invalid_attributes
return invalid_attributes
@classmethod
def repair(cls, instance):
invalid = cls.get_invalid(instance)
for data in invalid:
data["attribute"].set(data["expected"])

View file

@ -1,5 +1,4 @@
import pyblish.api
import pype.api as pype
class CollectCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
@ -8,10 +7,7 @@ class CollectCurrentFile(pyblish.api.ContextPlugin):
def process(self, context):
"""Todo, inject the current working file"""
project = context.data('activeProject')
context.data["currentFile"] = path = project.path()
context.data["version"] = pype.get_version_from_path(path)
self.log.info("currentFile: {}".format(context.data["currentFile"]))
self.log.info("version: {}".format(context.data["version"]))

View file

@ -0,0 +1,15 @@
import pyblish.api
import pype.api as pype
class CollectWorkfileVersion(pyblish.api.ContextPlugin):
"""Inject the current working file version into context"""
order = pyblish.api.CollectorOrder - 0.1
label = "Collect workfile version"
def process(self, context):
project = context.data('activeProject')
path = project.path()
context.data["version"] = pype.get_version_from_path(path)
self.log.info("version: {}".format(context.data["version"]))

View file

@ -0,0 +1,74 @@
import pyblish
from avalon import io
from pype.action import get_errored_instances_from_context
import pype.api as pype
@pyblish.api.log
class RepairNukestudioVersionUp(pyblish.api.Action):
label = "Version Up Workfile"
on = "failed"
icon = "wrench"
def process(self, context, plugin):
errored_instances = get_errored_instances_from_context(context)
# Apply pyblish logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
if instances:
project = context.data["activeProject"]
path = context.data.get("currentFile")
new_path = pype.version_up(path)
if project:
project.saveAs(new_path)
self.log.info("Project workfile version was fixed")
class ValidateVersion(pyblish.api.InstancePlugin):
"""Validate clip's versions.
"""
order = pyblish.api.ValidatorOrder
families = ["plate"]
label = "Validate Version"
actions = [RepairNukestudioVersionUp]
hosts = ["nukestudio"]
def process(self, instance):
version = int(instance.data.get("version", 0))
asset_name = instance.data.get("asset", None)
subset_name = instance.data.get("subset", None)
assert version, "The file is missing a version string! Example: filename_v001.hrox"
self.log.debug("Collected version: `{0}`".format(version))
found_v = 0
try:
io.install()
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
"name": asset_name,
"parent": project["_id"]})
subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": subset_name})
version_db = io.find_one({
'type': 'version',
'parent': subset["_id"],
'name': version
}) or {}
found_v = version_db.get("name", 0)
self.log.debug("Found version: `{0}`".format(found_v))
except Exception as e:
self.log.debug("Problem to get data from database for asset `{0}` subset `{1}`. Error: `{2}`".format(asset_name, subset_name, e))
assert (found_v != version), "Version must not be the same as in database `{0}`, Versions file: `{1}`, db: `{2}`".format(asset_name, version, found_v)

View file

@ -0,0 +1,23 @@
from pyblish import api
import pype.api as pype
class VersionUpWorkfile(api.ContextPlugin):
"""Save as new workfile version"""
order = api.IntegratorOrder + 10.1
label = "Version-up Workfile"
hosts = ["nukestudio"]
optional = True
active = True
def process(self, context):
project = context.data["activeProject"]
path = context.data.get("currentFile")
new_path = pype.version_up(path)
if project:
project.saveAs(new_path)
self.log.info("Project workfile was versioned up")

View file

@ -96,9 +96,6 @@ def install():
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# synchronize extensions
extensions_sync()
message(title="pyblish_paths", message=str(reg_paths), level="info")
@ -109,6 +106,3 @@ def uninstall():
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()

View file

@ -1,10 +1,12 @@
from pype import api as pype
from pypeapp import Anatomy, config
log = pype.Logger().get_logger(__name__, "premiere")
def get_anatomy(**kwarg):
return pype.Anatomy
return Anatomy()
def get_dataflow(**kwarg):
@ -15,7 +17,8 @@ def get_dataflow(**kwarg):
assert any([host, cls]), log.error("premiera.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
pr_dataflow = getattr(pype.Dataflow, str(host), None)
presets = config.get_init_presets()
pr_dataflow = getattr(presets["dataflow"], str(host), None)
pr_dataflow_node = getattr(pr_dataflow.nodes, str(cls), None)
if preset:
pr_dataflow_node = getattr(pr_dataflow_node, str(preset), None)
@ -32,7 +35,8 @@ def get_colorspace(**kwarg):
assert any([host, cls]), log.error("premiera.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
pr_colorspace = getattr(pype.Colorspace, str(host), None)
presets = config.get_init_presets()
pr_colorspace = getattr(presets["colorspace"], str(host), None)
pr_colorspace_node = getattr(pr_colorspace, str(cls), None)
if preset:
pr_colorspace_node = getattr(pr_colorspace_node, str(preset), None)

View file

@ -4,7 +4,7 @@ import sys
from avalon import io, api as avalon, lib as avalonlib
from . import lib
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger, config, Anatomy
from pypeapp import Logger, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
@ -17,63 +17,6 @@ def set_session():
self.SESSION = avalon.session
def load_data_from_templates():
"""
Load Presets and Anatomy `contextual` data as singleton object
[info](https://en.wikipedia.org/wiki/Singleton_pattern)
Returns:
singleton: adding data to sharable object variable
"""
from . import api
if not any([
api.Dataflow,
api.Anatomy,
api.Colorspace
]
):
presets = config.get_presets()
anatomy = Anatomy()
try:
# try if it is not in projects custom directory
# `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json`
# init.json define preset names to be used
p_init = presets["init"]
colorspace = presets["colorspace"][p_init["colorspace"]]
dataflow = presets["dataflow"][p_init["dataflow"]]
except KeyError:
log.warning("No projects custom preset available...")
colorspace = presets["colorspace"]["default"]
dataflow = presets["dataflow"]["default"]
log.info("Presets `colorspace` and `dataflow` loaded from `default`...")
api.Anatomy = anatomy
api.Dataflow = dataflow
api.Colorspace = colorspace
log.info("Data from templates were Loaded...")
def reset_data_from_templates():
"""
Clear Templates `contextual` data from singleton
object variable
Returns:
singleton: clearing data to None
"""
from . import api
api.Dataflow = None
api.Anatomy = None
api.Colorspace = None
log.info("Data from templates were Unloaded...")
def get_version_from_path(file):
"""
Finds version number in file path string
@ -265,7 +208,9 @@ def set_avalon_workdir(project=None,
if self.SESSION is None:
set_session()
awd = self.SESSION.get("AVALON_WORKDIR", None) or os.getenv("AVALON_WORKDIR", None)
awd = self.SESSION.get("AVALON_WORKDIR", None) or \
os.getenv("AVALON_WORKDIR", None)
data = get_context_data(project, hierarchy, asset, task)
if (not awd) or ("{" not in awd):
@ -280,7 +225,7 @@ def set_avalon_workdir(project=None,
def get_workdir_template(data=None):
"""
Obtain workdir templated path from api.Anatomy singleton
Obtain workdir templated path from Anatomy()
Args:
data (dict, optional): basic contextual data
@ -288,12 +233,8 @@ def get_workdir_template(data=None):
Returns:
string: template path
"""
from . import api
""" Installs singleton data """
load_data_from_templates()
anatomy = api.Anatomy
anatomy = Anatomy()
anatomy_filled = anatomy.format(data or get_context_data())
try:
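Taken together, the hunks in this file replace the api.Anatomy / api.Dataflow / api.Colorspace singletons with on-demand pypeapp calls. A minimal sketch of the resulting pattern, with illustrative contextual data (the data keys are assumptions, not shown in this diff):

from pypeapp import Anatomy, config

anatomy = Anatomy()                  # built per call; replaces the api.Anatomy singleton
presets = config.get_init_presets()  # replaces the api.Dataflow / api.Colorspace singletons

data = {"project": {"name": "myproject", "code": "mp"}}  # illustrative; get_workdir_template()
                                                         # normally passes get_context_data()
anatomy_filled = anatomy.format(data)
# The workdir template is then read out of anatomy_filled inside the
# try-block at which the hunk above is truncated.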