Merge remote-tracking branch 'origin/develop' into release/v2.1.0

This commit is contained in:
Milan Kolar 2019-08-06 17:36:19 +02:00
commit 893090555d
8 changed files with 127 additions and 6 deletions

View file

@ -4,6 +4,7 @@ import time
from pype.ftrack import AppAction
from avalon import lib
from pypeapp import Logger
from pype.lib import get_all_avalon_projects
log = Logger().get_logger(__name__)
@ -48,6 +49,10 @@ def registerApp(app, session):
def register(session):
# WARNING getting projects only helps to check connection to mongo
# - without will `discover` of ftrack apps actions take ages
result = get_all_avalon_projects()
apps = []
launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")

View file

@ -249,7 +249,9 @@ class AttributesRemapper(BaseAction):
if interface_messages:
self.show_interface_from_dict(
event, interface_messages, "Errors during remapping attributes"
messages=interface_messages,
title="Errors during remapping attributes",
event=event
)
return True

View file

@ -186,7 +186,10 @@ class SyncHierarchicalAttrs(BaseAction):
job['status'] = 'failed'
session.commit()
if self.interface_messages:
self.show_interface_from_dict(self.interface_messages, event)
title = "Errors during SyncHierarchicalAttrs"
self.show_interface_from_dict(
messages=self.interface_messages, title=title, event=event
)
return True

View file

@ -495,13 +495,12 @@ class BaseHandler(object):
)
def show_interface_from_dict(
self, messages, event=None, user=None, username=None, user_id=None
self, messages, title="", event=None, user=None, username=None, user_id=None
):
if not messages:
self.log.debug("No messages to show! (messages dict is empty)")
return
items = []
title = 'Errors during mirroring'
splitter = {'type': 'label', 'value': '---'}
first = True
for key, value in messages.items():

View file

@ -51,7 +51,7 @@ def get_hierarchy(asset_name=None):
})
not_set = "PARENTS_NOT_SET"
entity_parents = entity.get("data", {}).get("parents", not_set)
entity_parents = asset_entity.get("data", {}).get("parents", not_set)
# If entity already have parents then just return joined
if entity_parents != not_set:

View file

@ -234,7 +234,6 @@ def create_write_node(name, data, prenodes=None):
"nuke_dataflow_writes": nuke_dataflow_writes,
"nuke_colorspace_writes": nuke_colorspace_writes
})
anatomy_filled = format_anatomy(data)
except Exception as e:

View file

@ -0,0 +1,30 @@
import toml
import nuke
import pyblish.api
class CollectReadLegacy(pyblish.api.ContextPlugin):
    """Collect legacy Read nodes that carry an ``avalon`` knob.

    Creates one pyblish instance (family ``read.legacy``) per matching
    Nuke Read node so a validator can flag them for clean-up.
    """

    order = pyblish.api.CollectorOrder
    label = "Collect Read Legacy"
    hosts = ["nuke", "nukeassist"]

    def process(self, context):
        for node in nuke.allNodes():
            # Only Read nodes managed by avalon are of interest.
            if node.Class() != "Read":
                continue

            if "avalon" not in node.knobs().keys():
                continue

            # BUG FIX: was `return`, which aborted collection for the
            # whole context as soon as one node had an empty "avalon"
            # knob; skip just that node instead, consistent with the
            # guards above.
            if not toml.loads(node["avalon"].value()):
                continue

            instance = context.create_instance(
                node.name(), family="read.legacy"
            )
            instance.append(node)

View file

@ -0,0 +1,83 @@
import os
import toml
import nuke
import pyblish.api
from avalon import api
from bson.objectid import ObjectId
class RepairReadLegacyAction(pyblish.api.Action):
    """Pyblish action: replace a legacy Read node via an avalon loader.

    Offered on failed ValidateReadLegacy results. Deletes the legacy
    Read node and re-creates it through the matching avalon loader,
    restoring its name, position and downstream connections.
    """

    label = "Repair"
    icon = "wrench"
    # Only shown when the owning plug-in has failed.
    on = "failed"

    def process(self, context, plugin):
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        for instance in instances:
            # instance[0] is the Nuke Read node collected by the
            # read-legacy collector; its "avalon" knob holds TOML metadata.
            data = toml.loads(instance[0]["avalon"].value())

            # Preserve identity and placement so the replacement node
            # can be put back in the same spot under the same name.
            data["name"] = instance[0].name()
            data["xpos"] = instance[0].xpos()
            data["ypos"] = instance[0].ypos()
            # File extension without the leading dot; selects the loader.
            data["extension"] = os.path.splitext(
                instance[0]["file"].value()
            )[1][1:]

            # Record every downstream input fed by this node so the
            # connections can be re-wired after re-creation.
            data["connections"] = []
            for d in instance[0].dependent():
                for i in range(d.inputs()):
                    if d.input(i) == instance[0]:
                        data["connections"].append([i, d])

            nuke.delete(instance[0])

            # Movies load through LoadMov, everything else as a sequence.
            loader_name = "LoadSequence"
            if data["extension"] == "mov":
                loader_name = "LoadMov"

            loader_plugin = None
            for Loader in api.discover(api.Loader):
                if Loader.__name__ != loader_name:
                    continue
                loader_plugin = Loader

            # NOTE(review): if discovery finds no matching loader,
            # loader_plugin stays None and is passed to api.load anyway —
            # confirm this cannot happen, or add a guard.
            api.load(
                Loader=loader_plugin,
                representation=ObjectId(data["representation"])
            )

            # Re-wire the saved connections to the freshly loaded node
            # and restore its original position.
            node = nuke.toNode(data["name"])
            for connection in data["connections"]:
                connection[1].setInput(connection[0], node)
            node.setXYpos(data["xpos"], data["ypos"])
class ValidateReadLegacy(pyblish.api.InstancePlugin):
    """Validate legacy read instances.

    Always fails for ``read.legacy`` instances: such Read nodes should
    be replaced through the repair action (or cleaned up manually).
    """

    order = pyblish.api.ValidatorOrder
    optional = True
    families = ["read.legacy"]
    label = "Read Legacy"
    hosts = ["nuke"]
    actions = [RepairReadLegacyAction]

    def process(self, instance):
        msg = "Clean up legacy read node \"{}\"".format(instance)
        # Raise explicitly instead of `assert`, which is stripped when
        # Python runs with -O; the exception type stays AssertionError,
        # so pyblish error reporting is unchanged.
        raise AssertionError(msg)