Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00

Commit 32711900a6: [Automated] Merged develop into main
24 changed files with 1133 additions and 206 deletions
@@ -41,7 +41,7 @@ class ExtractThumnail(publish.Extractor):
            track_item_name, thumb_frame, ".png")
        thumb_path = os.path.join(staging_dir, thumb_file)

-        thumbnail = track_item.thumbnail(thumb_frame).save(
+        thumbnail = track_item.thumbnail(thumb_frame, "colour").save(
            thumb_path,
            format='png'
        )
@@ -28,7 +28,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):

        Args:
            path (str): A path to current template (usually given by
-                get_template_path implementation)
+                get_template_preset implementation)

        Returns:
            bool: Whether the template was successfully imported or not
@@ -240,7 +240,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
        cmds.setAttr(node + ".hiddenInOutliner", True)

    def load_succeed(self, placeholder, container):
-        self._parent_in_hierarhchy(placeholder, container)
+        self._parent_in_hierarchy(placeholder, container)

    def _parent_in_hierarchy(self, placeholder, container):
        """Parent loaded container to placeholder's parent.
@@ -72,15 +72,19 @@ class CreateRender(plugin.Creator):
    def __init__(self, *args, **kwargs):
        """Constructor."""
        super(CreateRender, self).__init__(*args, **kwargs)
-        deadline_settings = get_system_settings()["modules"]["deadline"]
-        if not deadline_settings["enabled"]:
-            self.deadline_servers = {}
-            return

+        # Defaults
        self._project_settings = get_project_settings(
            legacy_io.Session["AVALON_PROJECT"])
        if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]:  # noqa
            lib_rendersettings.RenderSettings().set_default_renderer_settings()

+        # Deadline-only
        manager = ModulesManager()
+        deadline_settings = get_system_settings()["modules"]["deadline"]
+        if not deadline_settings["enabled"]:
+            self.deadline_servers = {}
+            return
        self.deadline_module = manager.modules_by_name["deadline"]
        try:
            default_servers = deadline_settings["deadline_urls"]
@@ -193,8 +197,6 @@ class CreateRender(plugin.Creator):
        pool_names = []
        default_priority = 50

-        self.server_aliases = list(self.deadline_servers.keys())
-        self.data["deadlineServers"] = self.server_aliases
        self.data["suspendPublishJob"] = False
        self.data["review"] = True
        self.data["extendFrames"] = False
@@ -233,6 +235,9 @@ class CreateRender(plugin.Creator):
            raise RuntimeError("Both Deadline and Muster are enabled")

        if deadline_enabled:
+            self.server_aliases = list(self.deadline_servers.keys())
+            self.data["deadlineServers"] = self.server_aliases
+
            try:
                deadline_url = self.deadline_servers["default"]
            except KeyError:
@@ -254,6 +259,19 @@ class CreateRender(plugin.Creator):
                                                          default_priority)
            self.data["tile_priority"] = tile_priority

+            pool_setting = (self._project_settings["deadline"]
+                                                  ["publish"]
+                                                  ["CollectDeadlinePools"])
+            primary_pool = pool_setting["primary_pool"]
+            self.data["primaryPool"] = self._set_default_pool(pool_names,
+                                                              primary_pool)
+            # We add a string "-" to allow the user to not
+            # set any secondary pools
+            pool_names = ["-"] + pool_names
+            secondary_pool = pool_setting["secondary_pool"]
+            self.data["secondaryPool"] = self._set_default_pool(pool_names,
+                                                                secondary_pool)
+
        if muster_enabled:
            self.log.info(">>> Loading Muster credentials ...")
            self._load_credentials()
@@ -273,18 +291,6 @@ class CreateRender(plugin.Creator):
            self.log.info(" - pool: {}".format(pool["name"]))
            pool_names.append(pool["name"])

-        pool_setting = (self._project_settings["deadline"]
-                                              ["publish"]
-                                              ["CollectDeadlinePools"])
-        primary_pool = pool_setting["primary_pool"]
-        self.data["primaryPool"] = self._set_default_pool(pool_names,
-                                                          primary_pool)
-        # We add a string "-" to allow the user to not
-        # set any secondary pools
-        pool_names = ["-"] + pool_names
-        secondary_pool = pool_setting["secondary_pool"]
-        self.data["secondaryPool"] = self._set_default_pool(pool_names,
-                                                            secondary_pool)
        self.options = {"useSelection": False}  # Force no content

    def _set_default_pool(self, pool_names, pool_value):
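The `_set_default_pool` helper is referenced by both hunks above, but its body is not part of this diff. A minimal sketch of what such a defaulting helper could look like (hypothetical illustration, not the committed implementation):

    def _set_default_pool(self, pool_names, pool_value):
        # Hypothetical: keep the configured pool when Deadline still
        # offers it, otherwise fall back to the first known pool name.
        if pool_value and pool_value in pool_names:
            return pool_value
        return pool_names[0] if pool_names else ""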
@@ -174,9 +174,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
            if "handles" in data:
                data["handleStart"] = data["handles"]
                data["handleEnd"] = data["handles"]
            else:
                data["handleStart"] = 0
                data["handleEnd"] = 0

            data["frameStartHandle"] = data["frameStart"] - data["handleStart"]  # noqa: E501
            data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"]  # noqa: E501
@@ -5,6 +5,11 @@ from openpype.pipeline.publish import (
    RepairAction,
    ValidateContentsOrder,
)
+from openpype.hosts.maya.api.lib_rendersetup import (
+    get_attr_overrides,
+    get_attr_in_layer,
+)
+from maya.app.renderSetup.model.override import AbsOverride


class ValidateFrameRange(pyblish.api.InstancePlugin):
@@ -92,10 +97,86 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
        """
        Repair instance container to match asset data.
        """
-        cmds.setAttr(
-            "{}.frameStart".format(instance.data["name"]),
-            instance.context.data.get("frameStartHandle"))
-
-        cmds.setAttr(
-            "{}.frameEnd".format(instance.data["name"]),
-            instance.context.data.get("frameEndHandle"))
        if "renderlayer" in instance.data.get("families"):
            # Special behavior for renderlayers
            cls.repair_renderlayer(instance)
            return

        node = instance.data["name"]
        context = instance.context

        frame_start_handle = int(context.data.get("frameStartHandle"))
        frame_end_handle = int(context.data.get("frameEndHandle"))
        handle_start = int(context.data.get("handleStart"))
        handle_end = int(context.data.get("handleEnd"))
        frame_start = int(context.data.get("frameStart"))
        frame_end = int(context.data.get("frameEnd"))

        # Start
        if cmds.attributeQuery("handleStart", node=node, exists=True):
            cmds.setAttr("{}.handleStart".format(node), handle_start)
            cmds.setAttr("{}.frameStart".format(node), frame_start)
        else:
            # Include start handle in frame start if no separate handleStart
            # attribute exists on the node
            cmds.setAttr("{}.frameStart".format(node), frame_start_handle)

        # End
        if cmds.attributeQuery("handleEnd", node=node, exists=True):
            cmds.setAttr("{}.handleEnd".format(node), handle_end)
            cmds.setAttr("{}.frameEnd".format(node), frame_end)
        else:
            # Include end handle in frame end if no separate handleEnd
            # attribute exists on the node
            cmds.setAttr("{}.frameEnd".format(node), frame_end_handle)

    @classmethod
    def repair_renderlayer(cls, instance):
        """Apply frame range in render settings"""

        layer = instance.data["setMembers"]
        context = instance.context

        start_attr = "defaultRenderGlobals.startFrame"
        end_attr = "defaultRenderGlobals.endFrame"

        frame_start_handle = int(context.data.get("frameStartHandle"))
        frame_end_handle = int(context.data.get("frameEndHandle"))

        cls._set_attr_in_layer(start_attr, layer, frame_start_handle)
        cls._set_attr_in_layer(end_attr, layer, frame_end_handle)

    @classmethod
    def _set_attr_in_layer(cls, node_attr, layer, value):

        if get_attr_in_layer(node_attr, layer=layer) == value:
            # Already ok. This can happen if you have multiple renderlayers
            # validated and there are no frame range overrides. The first
            # layer's repair would have fixed the global value already
            return

        overrides = list(get_attr_overrides(node_attr, layer=layer))
        if overrides:
            # We set the last absolute override if it is an absolute override
            # otherwise we'll add an Absolute override
            last_override = overrides[-1][1]
            if not isinstance(last_override, AbsOverride):
                collection = last_override.parent()
                node, attr = node_attr.split(".", 1)
                last_override = collection.createAbsoluteOverride(node, attr)

            cls.log.debug("Setting {attr} absolute override in "
                          "layer '{layer}': {value}".format(layer=layer,
                                                            attr=node_attr,
                                                            value=value))
            cmds.setAttr(last_override.name() + ".attrValue", value)

        else:
            # Set the attribute directly
            # (Note that this will set the global attribute)
            cls.log.debug("Setting global {attr}: {value}".format(
                attr=node_attr,
                value=value
            ))
            cmds.setAttr(node_attr, value)
@@ -2865,10 +2865,11 @@ def get_group_io_nodes(nodes):
            break

    if input_node is None:
-        raise ValueError("No Input found")
+        log.warning("No Input found")

    if output_node is None:
-        raise ValueError("No Output found")
+        log.warning("No Output found")

    return input_node, output_node
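Since `get_group_io_nodes` now warns instead of raising, either endpoint may come back as `None`, and callers are expected to guard before rewiring (the Nuke hunks below add exactly such `and output_node` / `and input_node` checks). A minimal caller-side sketch of that contract, assuming the same node API as the surrounding code:

    input_node, output_node = get_group_io_nodes(nodes)
    if output_node is not None:
        # Rewire downstream inputs only when an Output node was found.
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node:
                    node.setInput(idx, output_node)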
@@ -35,6 +35,7 @@ from .lib import (
)
from .workfile_template_builder import (
    NukePlaceholderLoadPlugin,
+    NukePlaceholderCreatePlugin,
    build_workfile_template,
    update_workfile_template,
    create_placeholder,
@@ -139,7 +140,8 @@ def _show_workfiles():

def get_workfile_build_placeholder_plugins():
    return [
-        NukePlaceholderLoadPlugin
+        NukePlaceholderLoadPlugin,
+        NukePlaceholderCreatePlugin
    ]
@@ -217,10 +219,6 @@ def _install_menu():
        "Build Workfile from template",
        lambda: build_workfile_template()
    )
-    menu_template.addCommand(
-        "Update Workfile",
-        lambda: update_workfile_template()
-    )
    menu_template.addSeparator()
    menu_template.addCommand(
        "Create Place Holder",
@@ -7,7 +7,9 @@ from openpype.pipeline.workfile.workfile_template_builder import (
    AbstractTemplateBuilder,
    PlaceholderPlugin,
    LoadPlaceholderItem,
+    CreatePlaceholderItem,
    PlaceholderLoadMixin,
+    PlaceholderCreateMixin
)
from openpype.tools.workfile_template_build import (
    WorkfileBuildPlaceholderDialog,
@@ -32,7 +34,7 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET"


class NukeTemplateBuilder(AbstractTemplateBuilder):
-    """Concrete implementation of AbstractTemplateBuilder for maya"""
+    """Concrete implementation of AbstractTemplateBuilder for nuke"""

    def import_template(self, path):
        """Import template into current scene.
@@ -40,7 +42,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):

        Args:
            path (str): A path to current template (usually given by
-                get_template_path implementation)
+                get_template_preset implementation)

        Returns:
            bool: Whether the template was successfully imported or not
@@ -74,8 +76,7 @@ class NukePlaceholderPlugin(PlaceholderPlugin):

            node_knobs = node.knobs()
            if (
-                "builder_type" not in node_knobs
-                or "is_placeholder" not in node_knobs
+                "is_placeholder" not in node_knobs
                or not node.knob("is_placeholder").value()
            ):
                continue
@@ -273,6 +274,15 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):

        placeholder.data["nb_children"] += 1
        reset_selection()

+        # remove placeholders marked as delete
+        if (
+            placeholder.data.get("delete")
+            and not placeholder.data.get("keep_placeholder")
+        ):
+            self.log.debug("Deleting node: {}".format(placeholder_node.name()))
+            nuke.delete(placeholder_node)
+
        # go back to root group
        nuke.root().begin()
@@ -454,12 +464,12 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
        )
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
-                if node.input(idx) == placeholder_node:
+                if node.input(idx) == placeholder_node and output_node:
                    node.setInput(idx, output_node)

        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
-                if placeholder_node.input(idx) == node:
+                if placeholder_node.input(idx) == node and input_node:
                    input_node.setInput(0, node)

    def _create_sib_copies(self, placeholder):
@@ -535,6 +545,408 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
        siblings_input.setInput(0, copy_output)


class NukePlaceholderCreatePlugin(
    NukePlaceholderPlugin, PlaceholderCreateMixin
):
    identifier = "nuke.create"
    label = "Nuke create"

    def _parse_placeholder_node_data(self, node):
        placeholder_data = super(
            NukePlaceholderCreatePlugin, self
        )._parse_placeholder_node_data(node)

        node_knobs = node.knobs()
        nb_children = 0
        if "nb_children" in node_knobs:
            nb_children = int(node_knobs["nb_children"].getValue())
        placeholder_data["nb_children"] = nb_children

        siblings = []
        if "siblings" in node_knobs:
            siblings = node_knobs["siblings"].values()
        placeholder_data["siblings"] = siblings

        node_full_name = node.fullName()
        placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
        placeholder_data["last_loaded"] = []
        placeholder_data["delete"] = False
        return placeholder_data

    def _before_instance_create(self, placeholder):
        placeholder.data["nodes_init"] = nuke.allNodes()

    def collect_placeholders(self):
        output = []
        scene_placeholders = self._collect_scene_placeholders()
        for node_name, node in scene_placeholders.items():
            plugin_identifier_knob = node.knob("plugin_identifier")
            if (
                plugin_identifier_knob is None
                or plugin_identifier_knob.getValue() != self.identifier
            ):
                continue

            placeholder_data = self._parse_placeholder_node_data(node)

            output.append(
                CreatePlaceholderItem(node_name, placeholder_data, self)
            )

        return output

    def populate_placeholder(self, placeholder):
        self.populate_create_placeholder(placeholder)

    def repopulate_placeholder(self, placeholder):
        self.populate_create_placeholder(placeholder)

    def get_placeholder_options(self, options=None):
        return self.get_create_plugin_options(options)

    def cleanup_placeholder(self, placeholder, failed):
        # deselect all selected nodes
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        # getting the latest nodes added
        nodes_init = placeholder.data["nodes_init"]
        nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
        self.log.debug("Created nodes: {}".format(nodes_created))
        if not nodes_created:
            return

        placeholder.data["delete"] = True

        nodes_created = self._move_to_placeholder_group(
            placeholder, nodes_created
        )
        placeholder.data["last_created"] = nodes_created
        refresh_nodes(nodes_created)

        # positioning of the created nodes
        min_x, min_y, _, _ = get_extreme_positions(nodes_created)
        for node in nodes_created:
            xpos = (node.xpos() - min_x) + placeholder_node.xpos()
            ypos = (node.ypos() - min_y) + placeholder_node.ypos()
            node.setXYpos(xpos, ypos)
        refresh_nodes(nodes_created)

        # fix the problem of z_order for backdrops
        self._fix_z_order(placeholder)
        self._imprint_siblings(placeholder)

        if placeholder.data["nb_children"] == 0:
            # save initial nodes positions and dimensions, update them
            # and set inputs and outputs of created nodes

            self._imprint_inits()
            self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
            self._set_created_connections(placeholder)

        elif placeholder.data["siblings"]:
            # create copies of placeholder siblings for the new created nodes,
            # set their inputs and outputs and update all nodes positions and
            # dimensions and siblings names

            siblings = get_nodes_by_names(placeholder.data["siblings"])
            refresh_nodes(siblings)
            copies = self._create_sib_copies(placeholder)
            new_nodes = list(copies.values())  # copies nodes
            self._update_nodes(placeholder, new_nodes, nodes_created)
            placeholder_node.removeKnob(placeholder_node.knob("siblings"))
            new_nodes_name = get_names_from_nodes(new_nodes)
            imprint(placeholder_node, {"siblings": new_nodes_name})
            self._set_copies_connections(placeholder, copies)

            self._update_nodes(
                placeholder,
                nuke.allNodes(),
                new_nodes + nodes_created,
                20
            )

            new_siblings = get_names_from_nodes(new_nodes)
            placeholder.data["siblings"] = new_siblings

        else:
            # if the placeholder doesn't have siblings, the created
            # nodes will be placed in a free space

            xpointer, ypointer = find_free_space_to_paste_nodes(
                nodes_created, direction="bottom", offset=200
            )
            node = nuke.createNode("NoOp")
            reset_selection()
            nuke.delete(node)
            for node in nodes_created:
                xpos = (node.xpos() - min_x) + xpointer
                ypos = (node.ypos() - min_y) + ypointer
                node.setXYpos(xpos, ypos)

        placeholder.data["nb_children"] += 1
        reset_selection()

        # remove placeholders marked as delete
        if (
            placeholder.data.get("delete")
            and not placeholder.data.get("keep_placeholder")
        ):
            self.log.debug("Deleting node: {}".format(placeholder_node.name()))
            nuke.delete(placeholder_node)

        # go back to root group
        nuke.root().begin()
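`cleanup_placeholder` above detects what was created by snapshotting `nuke.allNodes()` before and after the creator runs. The same before/after set-difference pattern works anywhere an API offers no creation callback; a self-contained sketch with plain Python stand-ins instead of Nuke nodes:

    def snapshot_new_items(get_all_items, action):
        """Run 'action' and return only the items it created."""
        before = set(get_all_items())
        action()
        return list(set(get_all_items()) - before)

    # Usage with a plain list standing in for the node graph:
    scene = ["Read1", "Merge1"]
    created = snapshot_new_items(lambda: scene, lambda: scene.append("Write1"))
    print(created)  # ['Write1']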
    def _move_to_placeholder_group(self, placeholder, nodes_created):
        """Open the placeholder's group and paste the created nodes into it.

        Returns:
            nodes_created (list): the new list of pasted nodes
        """
        groups_name = placeholder.data["group_name"]
        reset_selection()
        select_nodes(nodes_created)
        if groups_name:
            with node_tempfile() as filepath:
                nuke.nodeCopy(filepath)
                for node in nuke.selectedNodes():
                    nuke.delete(node)
                group = nuke.toNode(groups_name)
                group.begin()
                nuke.nodePaste(filepath)
                nodes_created = nuke.selectedNodes()
        return nodes_created

    def _fix_z_order(self, placeholder):
        """Fix the problem of z_order when a backdrop is created."""

        nodes_created = placeholder.data["last_created"]
        created_backdrops = []
        bd_orders = set()
        for node in nodes_created:
            if isinstance(node, nuke.BackdropNode):
                created_backdrops.append(node)
                bd_orders.add(node.knob("z_order").getValue())

        if not bd_orders:
            return

        sib_orders = set()
        for node_name in placeholder.data["siblings"]:
            node = nuke.toNode(node_name)
            if isinstance(node, nuke.BackdropNode):
                sib_orders.add(node.knob("z_order").getValue())

        if not sib_orders:
            return

        min_order = min(bd_orders)
        max_order = max(sib_orders)
        for backdrop_node in created_backdrops:
            z_order = backdrop_node.knob("z_order").getValue()
            backdrop_node.knob("z_order").setValue(
                z_order + max_order - min_order + 1)

    def _imprint_siblings(self, placeholder):
        """
        - add siblings names to placeholder attributes (nodes created with it)
        - add Id to the attributes of all the other nodes
        """

        created_nodes = placeholder.data["last_created"]
        created_nodes_set = set(created_nodes)

        for node in created_nodes:
            node_knobs = node.knobs()

            if (
                "is_placeholder" not in node_knobs
                or (
                    "is_placeholder" in node_knobs
                    and node.knob("is_placeholder").value()
                )
            ):
                siblings = list(created_nodes_set - {node})
                siblings_name = get_names_from_nodes(siblings)
                siblings = {"siblings": siblings_name}
                imprint(node, siblings)

    def _imprint_inits(self):
        """Add initial positions and dimensions to the attributes"""

        for node in nuke.allNodes():
            refresh_node(node)
            imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
            node.knob("x_init").setVisible(False)
            node.knob("y_init").setVisible(False)
            width = node.screenWidth()
            height = node.screenHeight()
            if "bdwidth" in node.knobs():
                imprint(node, {"w_init": width, "h_init": height})
                node.knob("w_init").setVisible(False)
                node.knob("h_init").setVisible(False)
            refresh_node(node)

    def _update_nodes(
        self, placeholder, nodes, considered_nodes, offset_y=None
    ):
        """Adjust backdrop nodes dimensions and positions.

        Considering some nodes sizes.

        Args:
            nodes (list): list of nodes to update
            considered_nodes (list): list of nodes to consider while updating
                positions and dimensions
            offset_y (int): distance between copies
        """

        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)

        diff_x = diff_y = 0
        contained_nodes = []  # for backdrops

        if offset_y is None:
            width_ph = placeholder_node.screenWidth()
            height_ph = placeholder_node.screenHeight()
            diff_y = max_y - min_y - height_ph
            diff_x = max_x - min_x - width_ph
            contained_nodes = [placeholder_node]
            min_x = placeholder_node.xpos()
            min_y = placeholder_node.ypos()
        else:
            siblings = get_nodes_by_names(placeholder.data["siblings"])
            minX, _, maxX, _ = get_extreme_positions(siblings)
            diff_y = max_y - min_y + 20
            diff_x = abs(max_x - min_x - maxX + minX)
            contained_nodes = considered_nodes

        if diff_y <= 0 and diff_x <= 0:
            return

        for node in nodes:
            refresh_node(node)

            if (
                node == placeholder_node
                or node in considered_nodes
            ):
                continue

            if (
                not isinstance(node, nuke.BackdropNode)
                or (
                    isinstance(node, nuke.BackdropNode)
                    and not set(contained_nodes) <= set(node.getNodes())
                )
            ):
                if offset_y is None and node.xpos() >= min_x:
                    node.setXpos(node.xpos() + diff_x)

                if node.ypos() >= min_y:
                    node.setYpos(node.ypos() + diff_y)

            else:
                width = node.screenWidth()
                height = node.screenHeight()
                node.knob("bdwidth").setValue(width + diff_x)
                node.knob("bdheight").setValue(height + diff_y)

            refresh_node(node)
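`_update_nodes` leans on `get_extreme_positions` for the bounding box of the considered nodes. The real helper is imported from the Nuke host lib and is not part of this diff; a pure-Python stand-in showing the assumed bounding-box arithmetic (nodes exposing `xpos`/`ypos` and screen sizes, as in the Nuke API):

    def get_extreme_positions(nodes):
        # Bounding box over the node rectangles: (min_x, min_y, max_x, max_y).
        min_x = min(node.xpos() for node in nodes)
        min_y = min(node.ypos() for node in nodes)
        max_x = max(node.xpos() + node.screenWidth() for node in nodes)
        max_y = max(node.ypos() + node.screenHeight() for node in nodes)
        return min_x, min_y, max_x, max_y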
    def _set_created_connections(self, placeholder):
        """Set inputs and outputs of created nodes."""

        placeholder_node = nuke.toNode(placeholder.scene_identifier)
        input_node, output_node = get_group_io_nodes(
            placeholder.data["last_created"]
        )
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node and output_node:
                    node.setInput(idx, output_node)

        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
                if placeholder_node.input(idx) == node and input_node:
                    input_node.setInput(0, node)

    def _create_sib_copies(self, placeholder):
        """Create copies of the placeholder siblings (the ones that were
        created with it) for the new nodes added.

        Returns:
            copies (dict): with copied nodes names and their copies
        """

        copies = {}
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        for node in siblings:
            new_node = duplicate_node(node)

            x_init = int(new_node.knob("x_init").getValue())
            y_init = int(new_node.knob("y_init").getValue())
            new_node.setXYpos(x_init, y_init)
            if isinstance(new_node, nuke.BackdropNode):
                w_init = new_node.knob("w_init").getValue()
                h_init = new_node.knob("h_init").getValue()
                new_node.knob("bdwidth").setValue(w_init)
                new_node.knob("bdheight").setValue(h_init)
                refresh_node(node)

            if "repre_id" in node.knobs().keys():
                node.removeKnob(node.knob("repre_id"))
            copies[node.name()] = new_node
        return copies

    def _set_copies_connections(self, placeholder, copies):
        """Set inputs and outputs of the copies.

        Args:
            copies (dict): Copied nodes by their names.
        """

        last_input, last_output = get_group_io_nodes(
            placeholder.data["last_created"]
        )
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        siblings_input, siblings_output = get_group_io_nodes(siblings)
        copy_input = copies[siblings_input.name()]
        copy_output = copies[siblings_output.name()]

        for node_init in siblings:
            if node_init == siblings_output:
                continue

            node_copy = copies[node_init.name()]
            for node in node_init.dependent():
                for idx in range(node.inputs()):
                    if node.input(idx) != node_init:
                        continue

                    if node in siblings:
                        copies[node.name()].setInput(idx, node_copy)
                    else:
                        last_input.setInput(0, node_copy)

            for node in node_init.dependencies():
                for idx in range(node_init.inputs()):
                    if node_init.input(idx) != node:
                        continue

                    if node_init == siblings_input:
                        copy_input.setInput(idx, node)
                    elif node in siblings:
                        node_copy.setInput(idx, copies[node.name()])
                    else:
                        node_copy.setInput(idx, last_output)

        siblings_input.setInput(0, copy_output)


def build_workfile_template(*args):
    builder = NukeTemplateBuilder(registered_host())
    builder.build_template()
@@ -28,7 +28,7 @@ class LoadBackdropNodes(load.LoaderPlugin):
    representations = ["nk"]
    families = ["workfile", "nukenodes"]

-    label = "Iport Nuke Nodes"
+    label = "Import Nuke Nodes"
    order = 0
    icon = "eye"
    color = "white"
@@ -3,6 +3,7 @@ import socket
import getpass

from openpype_modules.ftrack.lib import BaseAction
+from openpype_modules.ftrack.ftrack_server.lib import get_host_ip


class ActionWhereIRun(BaseAction):
@@ -53,8 +54,7 @@ class ActionWhereIRun(BaseAction):
        try:
            host_name = socket.gethostname()
            msgs["Hostname"] = host_name
-            host_ip = socket.gethostbyname(host_name)
-            msgs["IP"] = host_ip
+            msgs["IP"] = get_host_ip() or "N/A"
        except Exception:
            pass
@@ -26,6 +26,7 @@ from openpype_modules.ftrack import (
)
from openpype_modules.ftrack.lib import credentials
from openpype_modules.ftrack.ftrack_server import socket_thread
+from openpype_modules.ftrack.ftrack_server.lib import get_host_ip


class MongoPermissionsError(Exception):
@@ -245,11 +246,13 @@ def main_loop(ftrack_url):
    )

    host_name = socket.gethostname()
+    host_ip = get_host_ip()

    main_info = [
        ["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")],
        ["Username", getpass.getuser()],
        ["Host Name", host_name],
-        ["Host IP", socket.gethostbyname(host_name)],
+        ["Host IP", host_ip or "N/A"],
        ["OpenPype executable", get_openpype_execute_args()[-1]],
        ["OpenPype version", get_openpype_version() or "N/A"],
        ["OpenPype build version", get_build_version() or "N/A"]
@@ -9,8 +9,9 @@ import time
import queue
import collections
import appdirs
-import pymongo
+import socket

+import pymongo
import requests
import ftrack_api
import ftrack_api.session
@@ -32,6 +33,16 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status"
TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result"


+def get_host_ip():
+    host_name = socket.gethostname()
+    try:
+        return socket.gethostbyname(host_name)
+    except Exception:
+        pass
+
+    return None
+
+
class SocketBaseEventHub(ftrack_api.event.hub.EventHub):

    hearbeat_msg = b"hearbeat"
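The new `get_host_ip` helper centralizes the `socket.gethostbyname` lookups that used to be inlined at every call site and swallows resolution failures. A short usage sketch of the fallback idiom the callers below adopt:

    host_ip = get_host_ip()
    # DNS may fail to resolve the local hostname (containers, VPNs,
    # offline machines), so consumers fall back to a readable placeholder.
    print("Event server IP: {}".format(host_ip or "N/A"))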
@@ -15,7 +15,8 @@ from openpype_modules.ftrack.ftrack_server.lib import (
    SocketSession,
    StatusEventHub,
    TOPIC_STATUS_SERVER,
-    TOPIC_STATUS_SERVER_RESULT
+    TOPIC_STATUS_SERVER_RESULT,
+    get_host_ip
)
from openpype.lib import (
    Logger,
@@ -29,10 +30,10 @@ log = Logger.get_logger("Event storer")
action_identifier = (
    "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"]
)
-host_ip = socket.gethostbyname(socket.gethostname())
+host_ip = get_host_ip()
action_data = {
    "label": "OpenPype Admin",
-    "variant": "- Event server Status ({})".format(host_ip),
+    "variant": "- Event server Status ({})".format(host_ip or "IP N/A"),
    "description": "Get Information about event server",
    "actionIdentifier": action_identifier
}
@@ -1,10 +1,12 @@
import pyblish.api

from openpype.lib.profiles_filtering import filter_profiles
from openpype.pipeline import legacy_io
+from openpype.lib import attribute_definitions
+from openpype.pipeline import OpenPypePyblishPluginMixin


-class CollectSlackFamilies(pyblish.api.InstancePlugin):
+class CollectSlackFamilies(pyblish.api.InstancePlugin,
+                           OpenPypePyblishPluginMixin):
    """Collect family for Slack notification

    Expects configured profile in
@@ -17,6 +19,18 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):

    profiles = None

+    @classmethod
+    def get_attribute_defs(cls):
+        return [
+            attribute_definitions.TextDef(
+                # Key under which it will be stored
+                "additional_message",
+                # Use plugin label as label for attribute
+                label="Additional Slack message",
+                placeholder="<Only if Slack is configured>"
+            )
+        ]
+
    def process(self, instance):
        task_data = instance.data["anatomyData"].get("task", {})
        family = self.main_family_from_instance(instance)
@@ -55,6 +69,11 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
                           ["token"])
        instance.data["slack_token"] = slack_token

+        attribute_values = self.get_attr_values_from_data(instance.data)
+        additional_message = attribute_values.get("additional_message")
+        if additional_message:
+            instance.data["slack_additional_message"] = additional_message
+
    def main_family_from_instance(self, instance):  # TODO yank from integrate
        """Returns main family of entered instance."""
        family = instance.data.get("family")
@@ -4,6 +4,8 @@ import six
import pyblish.api
import copy
from datetime import datetime
+from abc import ABCMeta, abstractmethod
+import time

from openpype.client import OpenPypeMongoConnection
from openpype.lib.plugin_tools import prepare_template_data
@@ -32,11 +34,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
        review_path = self._get_review_path(instance)

        publish_files = set()
        message = ''
+        additional_message = instance.data.get("slack_additional_message")
+        token = instance.data["slack_token"]
+        if additional_message:
+            message = "{} \n".format(additional_message)
        for message_profile in instance.data["slack_channel_message_profiles"]:
-            message = self._get_filled_message(message_profile["message"],
-                                               instance,
-                                               review_path)
-            self.log.debug("message:: {}".format(message))
+            message += self._get_filled_message(message_profile["message"],
+                                                instance,
+                                                review_path)
            if not message:
                return
@@ -50,18 +56,16 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
            project = instance.context.data["anatomyData"]["project"]["code"]
            for channel in message_profile["channels"]:
                if six.PY2:
-                    msg_id, file_ids = \
-                        self._python2_call(instance.data["slack_token"],
-                                           channel,
-                                           message,
-                                           publish_files)
+                    client = SlackPython2Operations(token, self.log)
                else:
-                    msg_id, file_ids = \
-                        self._python3_call(instance.data["slack_token"],
-                                           channel,
-                                           message,
-                                           publish_files)
+                    client = SlackPython3Operations(token, self.log)

+                users, groups = client.get_users_and_groups()
+                message = self._translate_users(message, users, groups)
+
+                msg_id, file_ids = client.send_message(channel,
+                                                       message,
+                                                       publish_files)
                if not msg_id:
                    return
@@ -179,15 +183,233 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
                break
        return review_path

-    def _python2_call(self, token, channel, message, publish_files):
-        from slackclient import SlackClient
    def _get_user_id(self, users, user_name):
        """Returns internal slack id for user name"""
        user_id = None
        user_name_lower = user_name.lower()
        for user in users:
            if (not user.get("deleted") and
                    (user_name_lower == user["name"].lower() or
                     # bots don't have display_name
                     user_name_lower == user["profile"].get("display_name",
                                                            '').lower() or
                     user_name_lower == user["profile"].get("real_name",
                                                            '').lower())):
                user_id = user["id"]
                break
        return user_id

    def _get_group_id(self, groups, group_name):
        """Returns internal group id for string name"""
        group_id = None
        for group in groups:
            if (not group.get("date_delete") and
                    (group_name.lower() == group["name"].lower() or
                     group_name.lower() == group["handle"])):
                group_id = group["id"]
                break
        return group_id

    def _translate_users(self, message, users, groups):
        """Replace all occurrences of @mentions with proper <@name> format."""
        matches = re.findall(r"(?<!<)@[^ ]+", message)
        in_quotes = re.findall(r"(?<!<)(['\"])(@[^'\"]+)", message)
        for item in in_quotes:
            matches.append(item[1])
        if not matches:
            return message

        for orig_user in matches:
            user_name = orig_user.replace("@", '')
            slack_id = self._get_user_id(users, user_name)
            mention = None
            if slack_id:
                mention = "<@{}>".format(slack_id)
            else:
                slack_id = self._get_group_id(groups, user_name)
                if slack_id:
                    mention = "<!subteam^{}>".format(slack_id)
            if mention:
                message = message.replace(orig_user, mention)

        return message
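`_translate_users` rewrites plain `@name` mentions into Slack's `<@ID>` (users) and `<!subteam^ID>` (groups) syntax. A self-contained sketch of the same matching logic against a hand-made payload shaped like Slack's `users.list` response (sample data is hypothetical):

    import re

    users = [{"id": "U123", "name": "jane", "deleted": False,
              "profile": {"display_name": "Jane", "real_name": "Jane Doe"}}]

    message = "Render done, thanks @jane"
    for mention in re.findall(r"(?<!<)@[^ ]+", message):
        name = mention[1:].lower()
        for user in users:
            if not user.get("deleted") and name in (
                    user["name"].lower(),
                    user["profile"].get("display_name", "").lower(),
                    user["profile"].get("real_name", "").lower()):
                message = message.replace(mention, "<@{}>".format(user["id"]))
    print(message)  # -> Render done, thanks <@U123>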
    def _escape_missing_keys(self, message, fill_data):
        """Double escapes placeholders which are missing in 'fill_data'"""
        placeholder_keys = re.findall(r"\{([^}]+)\}", message)

        fill_keys = []
        for key, value in fill_data.items():
            fill_keys.append(key)
            if isinstance(value, dict):
                for child_key in value.keys():
                    fill_keys.append("{}[{}]".format(key, child_key))

        not_matched = set(placeholder_keys) - set(fill_keys)

        for not_matched_item in not_matched:
            message = message.replace("{}".format(not_matched_item),
                                      "{{{}}}".format(not_matched_item))

        return message
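`_escape_missing_keys` double-braces any placeholder that `fill_data` cannot satisfy, so a later `str.format` pass leaves it literal instead of raising `KeyError`. A tiny demonstration of the same escaping round-trip:

    message = "Published {subset} for {asset[name]}"
    # Escape the key that cannot be filled, exactly as the method does:
    # wrapping the inner key in braces turns "{asset[name]}" into
    # "{{asset[name]}}", which survives formatting verbatim.
    message = message.replace("asset[name]", "{asset[name]}")
    print(message.format(subset="renderMain"))
    # -> Published renderMain for {asset[name]}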
@six.add_metaclass(ABCMeta)
class AbstractSlackOperations:

    @abstractmethod
    def _get_users_list(self):
        """Return response with user list, different methods Python 2 vs 3"""
        raise NotImplementedError

    @abstractmethod
    def _get_usergroups_list(self):
        """Return response with usergroup list, different methods Python 2 vs 3"""
        raise NotImplementedError

    @abstractmethod
    def get_users_and_groups(self):
        """Return users and groups, different retry in Python 2 vs 3"""
        raise NotImplementedError

    @abstractmethod
    def send_message(self, channel, message, publish_files):
        """Sends message to channel, different methods in Python 2 vs 3"""
        pass

    def _get_users(self):
        """Parse users.list response into list of users (dicts)"""
        first = True
        next_page = None
        users = []
        while first or next_page:
            response = self._get_users_list()
            first = False
            next_page = response.get("response_metadata").get("next_cursor")
            for user in response.get("members"):
                users.append(user)

        return users

    def _get_groups(self):
        """Parses usergroups.list response into list of groups (dicts)"""
        response = self._get_usergroups_list()
        groups = []
        for group in response.get("usergroups"):
            groups.append(group)
        return groups

    def _enrich_error(self, error_str, channel):
        """Enhance known errors with more helpful notations."""
        if 'not_in_channel' in error_str:
            # there is no file.write.public scope, app must be explicitly in
            # the channel
            msg = " - application must be added to channel '{}'.".format(channel)
            error_str += msg + " Ask Slack admin."
        return error_str


class SlackPython3Operations(AbstractSlackOperations):

    def __init__(self, token, log):
        from slack_sdk import WebClient

        self.client = WebClient(token=token)
        self.log = log

    def _get_users_list(self):
        return self.client.users_list()

    def _get_usergroups_list(self):
        return self.client.usergroups_list()

    def get_users_and_groups(self):
        from slack_sdk.errors import SlackApiError
        while True:
            try:
                users = self._get_users()
                groups = self._get_groups()
                break
            except SlackApiError as e:
                retry_after = e.response.headers.get("Retry-After")
                if retry_after:
                    print(
                        "Rate limit hit, sleeping for {}".format(retry_after))
                    time.sleep(int(retry_after))
                else:
                    self.log.warning("Cannot pull user info, "
                                     "mentions won't work", exc_info=True)
                    return [], []

        return users, groups

    def send_message(self, channel, message, publish_files):
        from slack_sdk.errors import SlackApiError
        try:
            attachment_str = "\n\n Attachment links: \n"
            file_ids = []
            for published_file in publish_files:
                response = self.client.files_upload(
                    file=published_file,
                    filename=os.path.basename(published_file))
                attachment_str += "\n<{}|{}>".format(
                    response["file"]["permalink"],
                    os.path.basename(published_file))
                file_ids.append(response["file"]["id"])

            if publish_files:
                message += attachment_str

            response = self.client.chat_postMessage(
                channel=channel,
                text=message
            )
            return response.data["ts"], file_ids
        except SlackApiError as e:
            # You will get a SlackApiError if "ok" is False
            error_str = self._enrich_error(str(e.response["error"]), channel)
            self.log.warning("Error happened {}".format(error_str))
        except Exception as e:
            error_str = self._enrich_error(str(e), channel)
            self.log.warning("Not SlackAPI error", exc_info=True)

        return None, []


class SlackPython2Operations(AbstractSlackOperations):

    def __init__(self, token, log):
        from slackclient import SlackClient

        self.client = SlackClient(token=token)
        self.log = log

    def _get_users_list(self):
        return self.client.api_call("users.list")

    def _get_usergroups_list(self):
        return self.client.api_call("usergroups.list")

    def get_users_and_groups(self):
        while True:
            try:
                users = self._get_users()
                groups = self._get_groups()
                break
            except Exception:
                self.log.warning("Cannot pull user info, "
                                 "mentions won't work", exc_info=True)
                return [], []

        return users, groups

    def send_message(self, channel, message, publish_files):
        try:
-            client = SlackClient(token)
            attachment_str = "\n\n Attachment links: \n"
            file_ids = []
            for p_file in publish_files:
                with open(p_file, 'rb') as pf:
-                    response = client.api_call(
+                    response = self.client.api_call(
                        "files.upload",
                        file=pf,
                        channel=channel,
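Both concrete classes expose the same three-call surface (the internal list fetchers plus `get_users_and_groups` and `send_message`), which is what lets the publish plugin code above stay Python-version agnostic. A hedged usage sketch, assuming a valid bot token and a channel the app was added to:

    import logging

    log = logging.getLogger("slack")
    client = SlackPython3Operations("xoxb-your-token", log)  # token is a placeholder
    users, groups = client.get_users_and_groups()
    msg_id, file_ids = client.send_message("#publishes", "Version is up", [])
    if not msg_id:
        log.warning("Message was not delivered")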
@@ -208,7 +430,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
            if publish_files:
                message += attachment_str

-            response = client.api_call(
+            response = self.client.api_call(
                "chat.postMessage",
                channel=channel,
                text=message
@@ -225,65 +447,3 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
            self.log.warning("Error happened: {}".format(error_str))

        return None, []

-    def _python3_call(self, token, channel, message, publish_files):
-        from slack_sdk import WebClient
-        from slack_sdk.errors import SlackApiError
-        try:
-            client = WebClient(token=token)
-            attachment_str = "\n\n Attachment links: \n"
-            file_ids = []
-            for published_file in publish_files:
-                response = client.files_upload(
-                    file=published_file,
-                    filename=os.path.basename(published_file))
-                attachment_str += "\n<{}|{}>".format(
-                    response["file"]["permalink"],
-                    os.path.basename(published_file))
-                file_ids.append(response["file"]["id"])
-
-            if publish_files:
-                message += attachment_str
-
-            response = client.chat_postMessage(
-                channel=channel,
-                text=message
-            )
-            return response.data["ts"], file_ids
-        except SlackApiError as e:
-            # You will get a SlackApiError if "ok" is False
-            error_str = self._enrich_error(str(e.response["error"]), channel)
-            self.log.warning("Error happened {}".format(error_str))
-        except Exception as e:
-            error_str = self._enrich_error(str(e), channel)
-            self.log.warning("Not SlackAPI error", exc_info=True)
-
-        return None, []
-
-    def _enrich_error(self, error_str, channel):
-        """Enhance known errors with more helpful notations."""
-        if 'not_in_channel' in error_str:
-            # there is no file.write.public scope, app must be explicitly in
-            # the channel
-            msg = " - application must added to channel '{}'.".format(channel)
-            error_str += msg + " Ask Slack admin."
-        return error_str
-
-    def _escape_missing_keys(self, message, fill_data):
-        """Double escapes placeholder which are missing in 'fill_data'"""
-        placeholder_keys = re.findall("\{([^}]+)\}", message)
-
-        fill_keys = []
-        for key, value in fill_data.items():
-            fill_keys.append(key)
-            if isinstance(value, dict):
-                for child_key in value.keys():
-                    fill_keys.append("{}[{}]".format(key, child_key))
-
-        not_matched = set(placeholder_keys) - set(fill_keys)
-
-        for not_matched_item in not_matched:
-            message = message.replace("{}".format(not_matched_item),
-                                      "{{{}}}".format(not_matched_item))
-
-        return message
@@ -165,7 +165,7 @@ class DropboxHandler(AbstractProvider):
        Returns:
            (boolean)
        """
-        return self.presets["enabled"] and self.dbx is not None
+        return self.presets.get("enabled") and self.dbx is not None

    @classmethod
    def get_configurable_items(cls):
@@ -119,7 +119,7 @@ class GDriveHandler(AbstractProvider):
        Returns:
            (boolean)
        """
-        return self.presets["enabled"] and self.service is not None
+        return self.presets.get("enabled") and self.service is not None

    @classmethod
    def get_system_settings_schema(cls):
@@ -169,7 +169,7 @@ def resolve_paths(module, file_path, project_name,
    return local_file_path, remote_file_path


-def site_is_working(module, project_name, site_name):
+def _site_is_working(module, project_name, site_name, site_config):
    """
    Confirm that 'site_name' is configured correctly for 'project_name'.
@@ -179,54 +179,17 @@ def _site_is_working(module, project_name, site_name, site_config):
        module (SyncServerModule)
        project_name(string):
        site_name(string):
+        site_config (dict): configuration for site from Settings
    Returns
        (bool)
    """
-    if _get_configured_sites(module, project_name).get(site_name):
-        return True
-    return False
+    provider = module.get_provider_for_site(site=site_name)
+    handler = lib.factory.get_provider(provider,
+                                       project_name,
+                                       site_name,
+                                       presets=site_config)
-
-
-def _get_configured_sites(module, project_name):
-    """
-    Loops through settings and looks for configured sites and checks
-    its handlers for particular 'project_name'.
-
-    Args:
-        project_setting(dict): dictionary from Settings
-        only_project_name(string, optional): only interested in
-            particular project
-    Returns:
-        (dict of dict)
-        {'ProjectA': {'studio':True, 'gdrive':False}}
-    """
-    settings = module.get_sync_project_setting(project_name)
-    return _get_configured_sites_from_setting(module, project_name, settings)
-
-
-def _get_configured_sites_from_setting(module, project_name, project_setting):
-    if not project_setting.get("enabled"):
-        return {}
-
-    initiated_handlers = {}
-    configured_sites = {}
-    all_sites = module._get_default_site_configs()
-    all_sites.update(project_setting.get("sites"))
-    for site_name, config in all_sites.items():
-        provider = module.get_provider_for_site(site=site_name)
-        handler = initiated_handlers.get((provider, site_name))
-        if not handler:
-            handler = lib.factory.get_provider(provider,
-                                               project_name,
-                                               site_name,
-                                               presets=config)
-            initiated_handlers[(provider, site_name)] = \
-                handler
-
-        if handler.is_active():
-            configured_sites[site_name] = True
-
-    return configured_sites
+    return handler.is_active()


class SyncServerThread(threading.Thread):
@@ -288,7 +251,8 @@ class SyncServerThread(threading.Thread):
            for project_name in enabled_projects:
                preset = self.module.sync_project_settings[project_name]

-                local_site, remote_site = self._working_sites(project_name)
+                local_site, remote_site = self._working_sites(project_name,
+                                                              preset)
                if not all([local_site, remote_site]):
                    continue
@@ -464,7 +428,7 @@ class SyncServerThread(threading.Thread):
            self.timer.cancel()
            self.timer = None

-    def _working_sites(self, project_name):
+    def _working_sites(self, project_name, sync_config):
        if self.module.is_project_paused(project_name):
            self.log.debug("Both sites same, skipping")
            return None, None
@@ -476,9 +440,12 @@ class SyncServerThread(threading.Thread):
                local_site, remote_site))
            return None, None

-        configured_sites = _get_configured_sites(self.module, project_name)
-        if not all([local_site in configured_sites,
-                    remote_site in configured_sites]):
+        local_site_config = sync_config.get('sites')[local_site]
+        remote_site_config = sync_config.get('sites')[remote_site]
+        if not all([_site_is_working(self.module, project_name, local_site,
+                                     local_site_config),
+                    _site_is_working(self.module, project_name, remote_site,
+                                     remote_site_config)]):
            self.log.debug(
                "Some of the sites {} - {} is not working properly".format(
                    local_site, remote_site
@@ -608,7 +608,7 @@ def discover_legacy_creator_plugins():
            plugin.apply_settings(project_settings, system_settings)
        except Exception:
            log.warning(
-                "Failed to apply settings to loader {}".format(
+                "Failed to apply settings to creator {}".format(
                    plugin.__name__
                ),
                exc_info=True
@@ -42,7 +42,9 @@ from openpype.pipeline.load import (
    get_contexts_for_repre_docs,
    load_with_repre_context,
)
-from openpype.pipeline.create import get_legacy_creator_by_name
+from openpype.pipeline.create import (
+    discover_legacy_creator_plugins
+)


class TemplateNotFound(Exception):
@@ -235,7 +237,14 @@ class AbstractTemplateBuilder(object):

    def get_creators_by_name(self):
        if self._creators_by_name is None:
-            self._creators_by_name = get_legacy_creator_by_name()
+            self._creators_by_name = {}
+            for creator in discover_legacy_creator_plugins():
+                creator_name = creator.__name__
+                if creator_name in self._creators_by_name:
+                    raise KeyError(
+                        "Duplicated creator name {} !".format(creator_name)
+                    )
+                self._creators_by_name[creator_name] = creator
        return self._creators_by_name

    def get_shared_data(self, key):
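The rebuilt mapping keeps the previous name-to-class contract, so existing lookups keep working. A short consumption sketch mirroring `populate_create_placeholder` later in this diff (the creator name is a hypothetical example):

    creators_by_name = builder.get_creators_by_name()
    creator_plugin = creators_by_name["CreateRender"]
    print(creator_plugin.label or creator_plugin.__name__)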
@@ -401,7 +410,12 @@ class AbstractTemplateBuilder(object):
            key=lambda i: i.order
        ))

-    def build_template(self, template_path=None, level_limit=None):
+    def build_template(
+        self,
+        template_path=None,
+        level_limit=None,
+        keep_placeholders=None
+    ):
        """Main callback for building workfile from template path.

        Todo:
@@ -410,16 +424,25 @@ class AbstractTemplateBuilder(object):

        Args:
            template_path (str): Path to a template file with placeholders.
-                Template from settings 'get_template_path' used when not
+                Template from settings 'get_template_preset' used when not
                passed.
            level_limit (int): Limit of populate loops. Related to
                'populate_scene_placeholders' method.
+            keep_placeholders (bool): Add flag to placeholder data for
+                hosts to decide if they want to remove
+                placeholder after it is used.
        """
+        template_preset = self.get_template_preset()

        if template_path is None:
-            template_path = self.get_template_path()
+            template_path = template_preset["path"]
+
+        if keep_placeholders is None:
+            keep_placeholders = template_preset["keep_placeholder"]

        self.import_template(template_path)
-        self.populate_scene_placeholders(level_limit)
+        self.populate_scene_placeholders(
+            level_limit, keep_placeholders)

    def rebuild_template(self):
        """Go through existing placeholders in scene and update them.
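With the extended signature, a caller can still invoke `build_template()` with no arguments and inherit both the template path and the `keep_placeholder` switch from the matched settings profile; explicit arguments override the preset. A minimal sketch, assuming a registered host as in `build_workfile_template` above:

    builder = NukeTemplateBuilder(registered_host())
    # Everything resolved from the settings profile:
    builder.build_template()
    # Or force placeholder removal regardless of the profile:
    builder.build_template(keep_placeholders=False)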
@@ -489,7 +512,9 @@ class AbstractTemplateBuilder(object):
            plugin = plugins_by_identifier[identifier]
            plugin.prepare_placeholders(placeholders)

-    def populate_scene_placeholders(self, level_limit=None):
+    def populate_scene_placeholders(
+        self, level_limit=None, keep_placeholders=None
+    ):
        """Find placeholders in scene using plugins and process them.

        This should happen after 'import_template'.
@@ -505,6 +530,9 @@ class AbstractTemplateBuilder(object):

        Args:
            level_limit (int): Level of loops that can happen. Default is 1000.
+            keep_placeholders (bool): Add flag to placeholder data for
+                hosts to decide if they want to remove
+                placeholder after it is used.
        """

        if not self.placeholder_plugins:
@@ -541,6 +569,11 @@ class AbstractTemplateBuilder(object):
                    " is already in progress."
                ))
                continue

+            # add flag for keeping placeholders in scene
+            # after they are processed
+            placeholder.data["keep_placeholder"] = keep_placeholders
+
            filtered_placeholders.append(placeholder)

        self._prepare_placeholders(filtered_placeholders)
@@ -599,8 +632,8 @@ class AbstractTemplateBuilder(object):
            ["profiles"]
        )

-    def get_template_path(self):
-        """Unified way how template path is received usign settings.
+    def get_template_preset(self):
+        """Unified way how template preset is received using settings.

        Method is dependent on '_get_build_profiles' which should return filter
        profiles to resolve path to a template. Default implementation looks
@@ -637,6 +670,13 @@ class AbstractTemplateBuilder(object):
            ).format(task_name, task_type, host_name))

        path = profile["path"]

+        # switch to remove placeholders after they are used
+        keep_placeholder = profile.get("keep_placeholder")
+        # backward compatibility, since default is True
+        if keep_placeholder is None:
+            keep_placeholder = True
+
        if not path:
            raise TemplateLoadFailed((
                "Template path is not set.\n"
@@ -650,14 +690,24 @@ class AbstractTemplateBuilder(object):
            key: value
            for key, value in os.environ.items()
        }

        fill_data["root"] = anatomy.roots
        fill_data["project"] = {
            "name": project_name,
            "code": anatomy["attributes"]["code"]
        }

        result = StringTemplate.format_template(path, fill_data)
        if result.solved:
            path = result.normalized()

        if path and os.path.exists(path):
            self.log.info("Found template at: '{}'".format(path))
-            return path
+            return {
+                "path": path,
+                "keep_placeholder": keep_placeholder
+            }

        solved_path = None
        while True:
@@ -683,7 +733,10 @@ class AbstractTemplateBuilder(object):

        self.log.info("Found template at: '{}'".format(solved_path))

-        return solved_path
+        return {
+            "path": solved_path,
+            "keep_placeholder": keep_placeholder
+        }


@six.add_metaclass(ABCMeta)
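Because `get_template_preset` now returns a dict instead of a bare path, external callers have to unpack it. A minimal consumer sketch under that contract:

    preset = builder.get_template_preset()
    template_path = preset["path"]
    keep_placeholder = preset["keep_placeholder"]
    print("Building from {} (keep placeholders: {})".format(
        template_path, keep_placeholder))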
@@ -1002,7 +1055,13 @@ class PlaceholderItem(object):
        return self._log

    def __repr__(self):
-        return "< {} {} >".format(self.__class__.__name__, self.name)
+        name = None
+        if hasattr(self, "name"):
+            name = self.name
+        if hasattr(self, "_scene_identifier"):
+            name = self._scene_identifier
+
+        return "< {} {} >".format(self.__class__.__name__, name)

    @property
    def order(self):
@ -1426,6 +1485,173 @@ class PlaceholderLoadMixin(object):
        pass


class PlaceholderCreateMixin(object):
    """Mixin prepared for create placeholder plugins.

    Options for placeholders are prepared with
    'get_create_plugin_options'.

    Placeholder population is implemented in 'populate_create_placeholder'.

    PlaceholderItem can have implemented methods:
    - 'create_failed' - called when creation of an instance failed
    - 'create_succeed' - called when creation of an instance succeeded
    """
    def get_create_plugin_options(self, options=None):
        """Unified attribute definitions for create placeholder.

        Common function for placeholder plugins used for creating
        publishable instances. Use it with 'get_placeholder_options'.

        Args:
            options (Dict[str, Any]): Already available options which are used
                as defaults for attributes.

        Returns:
            List[AbstractAttrDef]: Attribute definitions common for create
                plugins.
        """
        creators_by_name = self.builder.get_creators_by_name()

        creator_items = [
            (creator_name, creator.label or creator_name)
            for creator_name, creator in creators_by_name.items()
        ]

        creator_items.sort(key=lambda i: i[1])
        options = options or {}
        return [
            attribute_definitions.UISeparatorDef(),
            attribute_definitions.UILabelDef("Main attributes"),
            attribute_definitions.UISeparatorDef(),

            attribute_definitions.EnumDef(
                "creator",
                label="Creator",
                default=options.get("creator"),
                items=creator_items,
                tooltip=(
                    "Creator"
                    "\nDefines what OpenPype creator will be used to"
                    " create publishable instance."
                    "\nUsable creators depend on current host's creator list."
                    "\nField is case sensitive."
                )
            ),
            attribute_definitions.TextDef(
                "create_variant",
                label="Variant",
                default=options.get("create_variant"),
                placeholder='Main',
                tooltip=(
                    "Variant"
                    "\nDefines variant name which will be used for"
                    "\ncompiling of subset name."
                )
            ),
            attribute_definitions.UISeparatorDef(),
            attribute_definitions.NumberDef(
                "order",
                label="Order",
                default=options.get("order") or 0,
                decimals=0,
                minimum=0,
                maximum=999,
                tooltip=(
                    "Order"
                    "\nOrder defines creating instance priority (0 to 999)"
                    "\nPriority rule is: \"lowest is processed first\"."
                )
            )
        ]
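
A hypothetical host plugin wiring these shared options through 'get_placeholder_options', as the docstring above suggests; the class name, identifier and 'populate_placeholder' hook below are assumptions, not part of this patch:

```python
# Sketch: a host-side create placeholder plugin reusing the mixin.
class MyHostPlaceholderCreatePlugin(PlaceholderPlugin, PlaceholderCreateMixin):
    identifier = "my.host.create.placeholder"

    def get_placeholder_options(self, options=None):
        # Expose the unified creator/variant/order attribute definitions.
        return self.get_create_plugin_options(options)

    def populate_placeholder(self, placeholder):
        # Delegate publish instance creation to the mixin.
        self.populate_create_placeholder(placeholder)
```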
    def populate_create_placeholder(self, placeholder):
        """Create placeholder is going to create matching publishable instance.

        Args:
            placeholder (PlaceholderItem): Placeholder item with information
                about requested publishable instance.
        """
        creator_name = placeholder.data["creator"]
        create_variant = placeholder.data["create_variant"]

        creator_plugin = self.builder.get_creators_by_name()[creator_name]

        # compile subset name from variant
        project_name = legacy_io.Session["AVALON_PROJECT"]
        task_name = legacy_io.Session["AVALON_TASK"]
        asset_name = legacy_io.Session["AVALON_ASSET"]

        # get asset id
        asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
        assert asset_doc, "No current asset found in Session"
        asset_id = asset_doc["_id"]

        subset_name = creator_plugin.get_subset_name(
            create_variant,
            task_name,
            asset_id,
            project_name
        )

        creator_data = {
            "creator_name": creator_name,
            "create_variant": create_variant,
            "subset_name": subset_name,
            "creator_plugin": creator_plugin
        }

        self._before_instance_create(placeholder)

        # create the publish instance with the legacy creator
        try:
            creator_instance = creator_plugin(
                subset_name,
                asset_name
            ).process()

        except Exception:
            failed = True
            self.create_failed(placeholder, creator_data)

        else:
            failed = False
            self.create_succeed(placeholder, creator_instance)

        self.cleanup_placeholder(placeholder, failed)
    def create_failed(self, placeholder, creator_data):
        if hasattr(placeholder, "create_failed"):
            placeholder.create_failed(creator_data)

    def create_succeed(self, placeholder, creator_instance):
        if hasattr(placeholder, "create_succeed"):
            placeholder.create_succeed(creator_instance)

    def cleanup_placeholder(self, placeholder, failed):
        """Cleanup placeholder after creation of a single publish instance.

        Can be called multiple times during placeholder item populating and is
        called even if creation failed.

        Args:
            placeholder (PlaceholderItem): Item which was just used to create
                a publish instance.
            failed (bool): Creation of the instance failed.
        """

        pass

    def _before_instance_create(self, placeholder):
        """Can be overridden. Is called before instance is created."""

        pass

class LoadPlaceholderItem(PlaceholderItem):
    """PlaceholderItem for plugin which is loading representations.

@ -1449,3 +1675,28 @@ class LoadPlaceholderItem(PlaceholderItem):

    def load_failed(self, representation):
        self._failed_representations.append(representation)


class CreatePlaceholderItem(PlaceholderItem):
    """PlaceholderItem for plugin which is creating publish instances.

    Connected to 'PlaceholderCreateMixin'.
    """

    def __init__(self, *args, **kwargs):
        super(CreatePlaceholderItem, self).__init__(*args, **kwargs)
        self._failed_created_publish_instances = []

    def get_errors(self):
        if not self._failed_created_publish_instances:
            return []
        message = (
            "Failed to create {} instance(s) using Creator {}"
        ).format(
            len(self._failed_created_publish_instances),
            self.data["creator"]
        )
        return [message]

    def create_failed(self, creator_data):
        self._failed_created_publish_instances.append(creator_data)
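
A minimal sketch of how a builder could surface these collected errors after population; only 'get_errors' and the 'log' property come from the code above, and the 'placeholders' list is assumed:

```python
# Sketch: report collected placeholder errors after population.
for placeholder in placeholders:
    for error_message in placeholder.get_errors():
        placeholder.log.warning(error_message)
```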
@ -2,7 +2,7 @@
    "deadline_servers": [],
    "publish": {
        "CollectDefaultDeadlineServer": {
            "pass_mongo_url": false
            "pass_mongo_url": true
        },
        "CollectDeadlinePools": {
            "primary_pool": "",
@ -25,8 +25,15 @@
            {
                "key": "path",
                "label": "Path to template",
                "type": "text",
                "object_type": "text"
                "type": "path",
                "multiplatform": false,
                "multipath": false
            },
            {
                "key": "keep_placeholder",
                "label": "Keep placeholders",
                "type": "boolean",
                "default": true
            }
        ]
    }
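
With this schema, one configured profile in the project settings could then look like the following; the values and the 'task_types' filter key are illustrative, inferred from the profile resolution code above:

```json
{
    "task_types": ["Modeling"],
    "path": "{root[work]}/{project[name]}/templates/build_template.ma",
    "keep_placeholder": false
}
```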
@ -105,16 +105,19 @@ class ScrollMessageBox(QtWidgets.QDialog):
        content_widget = QtWidgets.QWidget(self)
        scroll_widget.setWidget(content_widget)

        max_len = 0
        message_len = 0
        content_layout = QtWidgets.QVBoxLayout(content_widget)
        for message in messages:
            label_widget = QtWidgets.QLabel(message, content_widget)
            content_layout.addWidget(label_widget)
            max_len = max(max_len, len(message))
            message_len = max(message_len, len(message))

        # guess size of scrollable area
        max_width = QtWidgets.QApplication.desktop().availableGeometry().width
        scroll_widget.setMinimumWidth(min(max_width, max_len * 6))
        desktop = QtWidgets.QApplication.desktop()
        max_width = desktop.availableGeometry().width()
        scroll_widget.setMinimumWidth(
            min(max_width, message_len * 6)
        )
        layout.addWidget(scroll_widget)

        if not cancelable:  # if no specific buttons OK only
@ -94,6 +94,16 @@ Few keys also have Capitalized and UPPERCASE format. Values will be modified acc
Here you can find review {review_filepath}
```

##### Dynamic message for artists
If an artist uses a host with the Publisher implemented (the new publishing UI, available in Tray Publisher, Adobe products etc.), they can
add an additional message (for example a notification for specific users; a proper user id prefixed with '@' must be provided).
The additional message is sent only if at least one profile, i.e. at least one target channel, is configured.
All available template keys (see above) can be used here as placeholders too, as in the example below.

#### User or group notifications
Message templates or dynamic data can contain user or group notifications. They must be in the format '@artist.name', '@John Doe', or "@admin group" for a display name containing a space.
If a value prefixed with '@' cannot be resolved to a Slack user, the message will contain the value as is (not translated by Slack into a link and proper mention).
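
For example, a dynamic message combining a template key with user and group mentions could look like this (the names are illustrative):
```
{review_filepath} is ready - @john.doe @admin group please check.
```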

#### Message retention
Currently no purging of old messages is implemented in OpenPype. Slack admins should set their own retention policy for messages and files per channel
(see https://slack.com/help/articles/203457187-Customize-message-and-file-retention-policies).