Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Merge branch 'develop' into release/3.15.x

Commit 078c5ffb5c
72 changed files with 1629 additions and 332 deletions
@@ -276,8 +276,8 @@ class CreatorWidget(QtWidgets.QDialog):
elif v["type"] == "QSpinBox":
data[k]["value"] = self.create_row(
content_layout, "QSpinBox", v["label"],
setRange=(1, 9999999), setValue=v["value"],
setToolTip=tool_tip)
setValue=v["value"], setMinimum=0,
setMaximum=100000, setToolTip=tool_tip)
return data

@@ -41,7 +41,7 @@ class ExtractThumnail(publish.Extractor):
track_item_name, thumb_frame, ".png")
thumb_path = os.path.join(staging_dir, thumb_file)
thumbnail = track_item.thumbnail(thumb_frame).save(
thumbnail = track_item.thumbnail(thumb_frame, "colour").save(
thumb_path,
format='png'
)
@@ -34,7 +34,7 @@ class CreateHDA(plugin.HoudiniCreator):
}
return subset_name.lower() in existing_subset_names_low
def _create_instance_node(
def create_instance_node(
self, node_name, parent, node_type="geometry"):
import hou

@@ -25,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):
# Check bypass state and reverse
active = True
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data.get("instance_node"))
if hasattr(node, "isBypassed"):
active = not node.isBypassed()

@@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
# Collect chunkSize
chunk_size_parm = rop.parm("chunkSize")

@@ -21,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
self.log.debug("No output node found..")
return
rop_node = hou.node(instance.get("instance_node"))
rop_node = hou.node(instance.data["instance_node"])
save_layers = []
for layer in usdlib.get_configured_save_layers(rop_node):

@@ -56,6 +56,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
layer_inst.data["subset"] = "__stub__"
layer_inst.data["label"] = label
layer_inst.data["asset"] = instance.data["asset"]
layer_inst.data["instance_node"] = instance.data["instance_node"]
# include same USD ROP
layer_inst.append(rop_node)
# include layer data

@@ -17,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor):
def process(self, instance):
ropnode = hou.node(instance.get("instance_node"))
ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved

@@ -18,7 +18,7 @@ class ExtractUSD(publish.Extractor):
def process(self, instance):
ropnode = hou.node(instance.get("instance_node"))
ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
output = ropnode.evalParm("lopoutput")

@@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor):
# Main ROP node, either a USD Rop or ROP network with
# multiple USD ROPs
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data["instance_node"])
# Collect any output dependencies that have not been processed yet
# during extraction of other instances

@@ -17,7 +17,7 @@ class ExtractVDBCache(publish.Extractor):
def process(self, instance):
ropnode = hou.node(instance.get("instance_node"))
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved

@@ -37,8 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data["instance_node"])
# Check trange parm, 0 means Render Current Frame
frame_range = node.evalParm("trange")
if frame_range == 0:

@@ -37,6 +37,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data["instance_node"])
if hasattr(rop, "isBypassed") and rop.isBypassed():
return [rop]

@@ -48,7 +48,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
)
if output_node is None:
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"COP Output node in '%s' does not exist. "
"Ensure a valid COP output path is set." % node.path()

@@ -37,8 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data["instance_node"])
# Check trange parm, 0 means Render Current Frame
frame_range = node.evalParm("trange")
if frame_range == 0:

@@ -38,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin):
validate_nodes = []
if len(instance) > 0:
validate_nodes.append(hou.node(instance.get("instance_node")))
validate_nodes.append(hou.node(instance.data.get("instance_node")))
output_node = instance.data.get("output_node")
if output_node:
validate_nodes.append(output_node)

@@ -28,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)

@@ -40,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)

@@ -36,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
output_node = instance.data["output_node"]
if output_node is None:
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"USD node '%s' LOP path does not exist. "
"Ensure a valid LOP path is set." % node.path()

@@ -24,7 +24,7 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin):
from pxr import UsdGeom
import hou
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)

@@ -20,7 +20,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
workspace = rop.parent()
definition = workspace.type().definition()

@@ -38,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
if node is None:
cls.log.error(
"SOP path is not correctly set on "
"ROP node '%s'." % instance.get("instance_node")
"ROP node '%s'." % instance.data.get("instance_node")
)
return [instance]
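Note: the Houdini hunks above all switch from instance.get("instance_node") to reading the node path from the instance's data dictionary (instance.data.get(...) or instance.data[...]), which is where the creators store it. A minimal sketch of that pattern, using a hypothetical collector running inside Houdini with pyblish:

    import hou
    import pyblish.api


    class CollectExampleRopNode(pyblish.api.InstancePlugin):
        """Hypothetical collector illustrating the instance.data lookup."""

        order = pyblish.api.CollectorOrder + 0.1
        hosts = ["houdini"]
        label = "Collect Example ROP Node"

        def process(self, instance):
            # The node path lives in the instance's data dictionary, so it
            # has to be read from instance.data rather than from the
            # instance object itself.
            node_path = instance.data.get("instance_node")
            if not node_path:
                self.log.warning("Instance has no 'instance_node' data.")
                return

            rop = hou.node(node_path)
            self.log.debug("Resolved ROP node: %s", rop.path())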
@@ -28,7 +28,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
Args:
path (str): A path to current template (usually given by
get_template_path implementation)
get_template_preset implementation)
Returns:
bool: Wether the template was succesfully imported or not

@@ -240,7 +240,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
cmds.setAttr(node + ".hiddenInOutliner", True)
def load_succeed(self, placeholder, container):
self._parent_in_hierarhchy(placeholder, container)
self._parent_in_hierarchy(placeholder, container)
def _parent_in_hierarchy(self, placeholder, container):
"""Parent loaded container to placeholder's parent.
@@ -72,15 +72,19 @@ class CreateRender(plugin.Creator):
def __init__(self, *args, **kwargs):
"""Constructor."""
super(CreateRender, self).__init__(*args, **kwargs)
deadline_settings = get_system_settings()["modules"]["deadline"]
if not deadline_settings["enabled"]:
self.deadline_servers = {}
return
# Defaults
self._project_settings = get_project_settings(
legacy_io.Session["AVALON_PROJECT"])
if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: # noqa
lib_rendersettings.RenderSettings().set_default_renderer_settings()
# Deadline-only
manager = ModulesManager()
deadline_settings = get_system_settings()["modules"]["deadline"]
if not deadline_settings["enabled"]:
self.deadline_servers = {}
return
self.deadline_module = manager.modules_by_name["deadline"]
try:
default_servers = deadline_settings["deadline_urls"]

@@ -193,8 +197,6 @@ class CreateRender(plugin.Creator):
pool_names = []
default_priority = 50
self.server_aliases = list(self.deadline_servers.keys())
self.data["deadlineServers"] = self.server_aliases
self.data["suspendPublishJob"] = False
self.data["review"] = True
self.data["extendFrames"] = False

@@ -233,6 +235,9 @@ class CreateRender(plugin.Creator):
raise RuntimeError("Both Deadline and Muster are enabled")
if deadline_enabled:
self.server_aliases = list(self.deadline_servers.keys())
self.data["deadlineServers"] = self.server_aliases
try:
deadline_url = self.deadline_servers["default"]
except KeyError:

@@ -254,6 +259,19 @@ class CreateRender(plugin.Creator):
default_priority)
self.data["tile_priority"] = tile_priority
pool_setting = (self._project_settings["deadline"]
["publish"]
["CollectDeadlinePools"])
primary_pool = pool_setting["primary_pool"]
self.data["primaryPool"] = self._set_default_pool(pool_names,
primary_pool)
# We add a string "-" to allow the user to not
# set any secondary pools
pool_names = ["-"] + pool_names
secondary_pool = pool_setting["secondary_pool"]
self.data["secondaryPool"] = self._set_default_pool(pool_names,
secondary_pool)
if muster_enabled:
self.log.info(">>> Loading Muster credentials ...")
self._load_credentials()

@@ -273,18 +291,6 @@ class CreateRender(plugin.Creator):
self.log.info(" - pool: {}".format(pool["name"]))
pool_names.append(pool["name"])
pool_setting = (self._project_settings["deadline"]
["publish"]
["CollectDeadlinePools"])
primary_pool = pool_setting["primary_pool"]
self.data["primaryPool"] = self._set_default_pool(pool_names,
primary_pool)
# We add a string "-" to allow the user to not
# set any secondary pools
pool_names = ["-"] + pool_names
secondary_pool = pool_setting["secondary_pool"]
self.data["secondaryPool"] = self._set_default_pool(pool_names,
secondary_pool)
self.options = {"useSelection": False} # Force no content
def _set_default_pool(self, pool_names, pool_value):
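Note: the Deadline pool hunks above route the configured pools through a _set_default_pool helper before storing them on self.data. A minimal sketch of what such a helper could do (hypothetical implementation; only the name and signature come from the diff):

    def _set_default_pool(pool_names, pool_value):
        # Prefer the pool configured in project settings when Deadline
        # actually reports it, otherwise fall back to the first known pool.
        if pool_value and pool_value in pool_names:
            return pool_value
        return pool_names[0] if pool_names else ""


    # Example with the "-" placeholder used for "no secondary pool":
    pools = ["-", "cpu", "gpu"]
    print(_set_default_pool(pools, "gpu"))  # gpu
    print(_set_default_pool(pools, ""))     # -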
@@ -23,8 +23,6 @@ class CameraWindow(QtWidgets.QDialog):
self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)
self.camera = None
self.static_image_plane = False
self.show_in_all_views = False
self.widgets = {
"label": QtWidgets.QLabel("Select camera for image plane."),

@@ -45,8 +43,6 @@ class CameraWindow(QtWidgets.QDialog):
for camera in cameras:
self.widgets["list"].addItem(camera)
self.widgets["staticImagePlane"].setText("Make Image Plane Static")
self.widgets["showInAllViews"].setText("Show Image Plane in All Views")
# Build buttons.
layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])

@@ -57,8 +53,6 @@ class CameraWindow(QtWidgets.QDialog):
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(self.widgets["label"])
layout.addWidget(self.widgets["list"])
layout.addWidget(self.widgets["staticImagePlane"])
layout.addWidget(self.widgets["showInAllViews"])
layout.addWidget(self.widgets["buttons"])
layout.addWidget(self.widgets["warning"])

@@ -73,8 +67,6 @@ class CameraWindow(QtWidgets.QDialog):
if self.camera is None:
self.widgets["warning"].setVisible(True)
return
self.show_in_all_views = self.widgets["showInAllViews"].isChecked()
self.static_image_plane = self.widgets["staticImagePlane"].isChecked()
self.close()

@@ -82,7 +74,6 @@ class CameraWindow(QtWidgets.QDialog):
self.camera = None
self.close()
class ImagePlaneLoader(load.LoaderPlugin):
"""Specific loader of plate for image planes on selected camera."""

@@ -106,12 +97,10 @@ class ImagePlaneLoader(load.LoaderPlugin):
# Get camera from user selection.
camera = None
is_static_image_plane = None
is_in_all_views = None
# is_static_image_plane = None
# is_in_all_views = None
if data:
camera = pm.PyNode(data.get("camera"))
is_static_image_plane = data.get("static_image_plane")
is_in_all_views = data.get("in_all_views")
if not camera:
cameras = pm.ls(type="camera")

@@ -119,11 +108,11 @@ class ImagePlaneLoader(load.LoaderPlugin):
camera_names["Create new camera."] = "create_camera"
window = CameraWindow(camera_names.keys())
window.exec_()
# Skip if no camera was selected (Dialog was closed)
if window.camera not in camera_names:
return
camera = camera_names[window.camera]
is_static_image_plane = window.static_image_plane
is_in_all_views = window.show_in_all_views
if camera == "create_camera":
camera = pm.createNode("camera")

@@ -139,18 +128,14 @@ class ImagePlaneLoader(load.LoaderPlugin):
# Create image plane
image_plane_transform, image_plane_shape = pm.imagePlane(
fileName=context["representation"]["data"]["path"],
camera=camera, showInAllViews=is_in_all_views
)
camera=camera)
image_plane_shape.depth.set(image_plane_depth)
if is_static_image_plane:
image_plane_shape.detach()
image_plane_transform.setRotation(camera.getRotation())
start_frame = pm.playbackOptions(q=True, min=True)
end_frame = pm.playbackOptions(q=True, max=True)
image_plane_shape.frameOffset.set(1 - start_frame)
image_plane_shape.frameOffset.set(0)
image_plane_shape.frameIn.set(start_frame)
image_plane_shape.frameOut.set(end_frame)
image_plane_shape.frameCache.set(end_frame)

@@ -180,9 +165,17 @@ class ImagePlaneLoader(load.LoaderPlugin):
QtWidgets.QMessageBox.Cancel
)
if reply == QtWidgets.QMessageBox.Ok:
pm.delete(
image_plane_shape.listConnections(type="expression")[0]
)
# find the input and output of frame extension
expressions = image_plane_shape.frameExtension.inputs()
frame_ext_output = image_plane_shape.frameExtension.outputs()
if expressions:
# the "time1" node is non-deletable attr
# in Maya, use disconnectAttr instead
pm.disconnectAttr(expressions, frame_ext_output)
if not image_plane_shape.frameExtension.isFreeToChange():
raise RuntimeError("Can't set frame extension for {}".format(image_plane_shape)) # noqa
# get the node of time instead and set the time for it.
image_plane_shape.frameExtension.set(start_frame)
new_nodes.extend(

@@ -233,7 +226,8 @@ class ImagePlaneLoader(load.LoaderPlugin):
)
start_frame = asset["data"]["frameStart"]
end_frame = asset["data"]["frameEnd"]
image_plane_shape.frameOffset.set(1 - start_frame)
image_plane_shape.frameOffset.set(0)
image_plane_shape.frameIn.set(start_frame)
image_plane_shape.frameOut.set(end_frame)
image_plane_shape.frameCache.set(end_frame)
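Note: the image plane hunks above drop the 1 - start_frame offset in favour of 0. With made-up playback values the difference looks like this:

    start_frame = 1001.0  # pm.playbackOptions(q=True, min=True)
    end_frame = 1100.0    # pm.playbackOptions(q=True, max=True)

    old_frame_offset = 1 - start_frame  # -1000.0 (previous behaviour)
    new_frame_offset = 0                # updated behaviour

    # frameIn/frameOut/frameCache still follow the playback range in both cases:
    frame_in, frame_out, frame_cache = start_frame, end_frame, end_frame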
@@ -174,9 +174,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
if "handles" in data:
data["handleStart"] = data["handles"]
data["handleEnd"] = data["handles"]
else:
data["handleStart"] = 0
data["handleEnd"] = 0
data["frameStartHandle"] = data["frameStart"] - data["handleStart"] # noqa: E501
data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"] # noqa: E501
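Note: a quick worked example of the handle math above, with made-up values:

    data = {"frameStart": 1001, "frameEnd": 1100, "handles": 10}

    data["handleStart"] = data["handles"]  # 10
    data["handleEnd"] = data["handles"]    # 10
    data["frameStartHandle"] = data["frameStart"] - data["handleStart"]  # 991
    data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"]        # 1110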
@@ -5,6 +5,11 @@ from openpype.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
)
from openpype.hosts.maya.api.lib_rendersetup import (
get_attr_overrides,
get_attr_in_layer,
)
from maya.app.renderSetup.model.override import AbsOverride
class ValidateFrameRange(pyblish.api.InstancePlugin):

@@ -92,10 +97,86 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
"""
Repair instance container to match asset data.
"""
cmds.setAttr(
"{}.frameStart".format(instance.data["name"]),
instance.context.data.get("frameStartHandle"))
cmds.setAttr(
"{}.frameEnd".format(instance.data["name"]),
instance.context.data.get("frameEndHandle"))
if "renderlayer" in instance.data.get("families"):
# Special behavior for renderlayers
cls.repair_renderlayer(instance)
return
node = instance.data["name"]
context = instance.context
frame_start_handle = int(context.data.get("frameStartHandle"))
frame_end_handle = int(context.data.get("frameEndHandle"))
handle_start = int(context.data.get("handleStart"))
handle_end = int(context.data.get("handleEnd"))
frame_start = int(context.data.get("frameStart"))
frame_end = int(context.data.get("frameEnd"))
# Start
if cmds.attributeQuery("handleStart", node=node, exists=True):
cmds.setAttr("{}.handleStart".format(node), handle_start)
cmds.setAttr("{}.frameStart".format(node), frame_start)
else:
# Include start handle in frame start if no separate handleStart
# attribute exists on the node
cmds.setAttr("{}.frameStart".format(node), frame_start_handle)
# End
if cmds.attributeQuery("handleEnd", node=node, exists=True):
cmds.setAttr("{}.handleEnd".format(node), handle_end)
cmds.setAttr("{}.frameEnd".format(node), frame_end)
else:
# Include end handle in frame end if no separate handleEnd
# attribute exists on the node
cmds.setAttr("{}.frameEnd".format(node), frame_end_handle)
@classmethod
def repair_renderlayer(cls, instance):
"""Apply frame range in render settings"""
layer = instance.data["setMembers"]
context = instance.context
start_attr = "defaultRenderGlobals.startFrame"
end_attr = "defaultRenderGlobals.endFrame"
frame_start_handle = int(context.data.get("frameStartHandle"))
frame_end_handle = int(context.data.get("frameEndHandle"))
cls._set_attr_in_layer(start_attr, layer, frame_start_handle)
cls._set_attr_in_layer(end_attr, layer, frame_end_handle)
@classmethod
def _set_attr_in_layer(cls, node_attr, layer, value):
if get_attr_in_layer(node_attr, layer=layer) == value:
# Already ok. This can happen if you have multiple renderlayers
# validated and there are no frame range overrides. The first
# layer's repair would have fixed the global value already
return
overrides = list(get_attr_overrides(node_attr, layer=layer))
if overrides:
# We set the last absolute override if it is an absolute override
# otherwise we'll add an Absolute override
last_override = overrides[-1][1]
if not isinstance(last_override, AbsOverride):
collection = last_override.parent()
node, attr = node_attr.split(".", 1)
last_override = collection.createAbsoluteOverride(node, attr)
cls.log.debug("Setting {attr} absolute override in "
"layer '{layer}': {value}".format(layer=layer,
attr=node_attr,
value=value))
cmds.setAttr(last_override.name() + ".attrValue", value)
else:
# Set the attribute directly
# (Note that this will set the global attribute)
cls.log.debug("Setting global {attr}: {value}".format(
attr=node_attr,
value=value
))
cmds.setAttr(node_attr, value)
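Note: the repair methods above are exposed through the RepairAction imported at the top of the hunk. A trimmed-down, hypothetical validator showing how that wiring usually looks (not the actual plugin):

    import pyblish.api
    from maya import cmds
    from openpype.pipeline.publish import RepairAction, ValidateContentsOrder


    class ValidateFrameStartSketch(pyblish.api.InstancePlugin):
        """Hypothetical validator sketch: check one attribute, offer a repair."""

        order = ValidateContentsOrder
        label = "Validate Frame Start (sketch)"
        hosts = ["maya"]
        actions = [RepairAction]

        def process(self, instance):
            expected = int(instance.context.data["frameStartHandle"])
            current = int(cmds.getAttr("{}.frameStart".format(instance.data["name"])))
            if current != expected:
                raise RuntimeError("frameStart does not match the asset data")

        @classmethod
        def repair(cls, instance):
            # RepairAction calls this classmethod for every failed instance.
            cmds.setAttr(
                "{}.frameStart".format(instance.data["name"]),
                instance.context.data["frameStartHandle"])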
@@ -2865,10 +2865,11 @@ def get_group_io_nodes(nodes):
break
if input_node is None:
raise ValueError("No Input found")
log.warning("No Input found")
if output_node is None:
raise ValueError("No Output found")
log.warning("No Output found")
return input_node, output_node
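Note: since get_group_io_nodes now logs a warning instead of raising, it can return None for either node, which is why the placeholder plugins below add "and output_node" / "and input_node" to their rewiring conditions. A condensed sketch of that calling pattern (simplified from the plugin code, assuming the helper is importable from the Nuke host lib and that this runs inside Nuke):

    def connect_created_nodes(placeholder_node, created_nodes):
        input_node, output_node = get_group_io_nodes(created_nodes)

        # Downstream nodes are only rewired when an Output node exists.
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node and output_node:
                    node.setInput(idx, output_node)

        # Upstream connections are only forwarded when an Input node exists.
        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
                if placeholder_node.input(idx) == node and input_node:
                    input_node.setInput(0, node)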
@@ -35,6 +35,7 @@ from .lib import (
)
from .workfile_template_builder import (
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin,
build_workfile_template,
update_workfile_template,
create_placeholder,

@@ -139,7 +140,8 @@ def _show_workfiles():
def get_workfile_build_placeholder_plugins():
return [
NukePlaceholderLoadPlugin
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin
]

@@ -217,10 +219,6 @@ def _install_menu():
"Build Workfile from template",
lambda: build_workfile_template()
)
menu_template.addCommand(
"Update Workfile",
lambda: update_workfile_template()
)
menu_template.addSeparator()
menu_template.addCommand(
"Create Place Holder",
@@ -7,7 +7,9 @@ from openpype.pipeline.workfile.workfile_template_builder import (
AbstractTemplateBuilder,
PlaceholderPlugin,
LoadPlaceholderItem,
CreatePlaceholderItem,
PlaceholderLoadMixin,
PlaceholderCreateMixin
)
from openpype.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,

@@ -32,7 +34,7 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET"
class NukeTemplateBuilder(AbstractTemplateBuilder):
"""Concrete implementation of AbstractTemplateBuilder for maya"""
"""Concrete implementation of AbstractTemplateBuilder for nuke"""
def import_template(self, path):
"""Import template into current scene.

@@ -40,7 +42,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):
Args:
path (str): A path to current template (usually given by
get_template_path implementation)
get_template_preset implementation)
Returns:
bool: Wether the template was succesfully imported or not

@@ -74,8 +76,7 @@ class NukePlaceholderPlugin(PlaceholderPlugin):
node_knobs = node.knobs()
if (
"builder_type" not in node_knobs
or "is_placeholder" not in node_knobs
"is_placeholder" not in node_knobs
or not node.knob("is_placeholder").value()
):
continue

@@ -273,6 +274,15 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
placeholder.data["nb_children"] += 1
reset_selection()
# remove placeholders marked as delete
if (
placeholder.data.get("delete")
and not placeholder.data.get("keep_placeholder")
):
self.log.debug("Deleting node: {}".format(placeholder_node.name()))
nuke.delete(placeholder_node)
# go back to root group
nuke.root().begin()

@@ -454,12 +464,12 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node:
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node:
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):

@@ -535,6 +545,408 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
siblings_input.setInput(0, copy_output)
class NukePlaceholderCreatePlugin(
NukePlaceholderPlugin, PlaceholderCreateMixin
):
identifier = "nuke.create"
label = "Nuke create"
def _parse_placeholder_node_data(self, node):
placeholder_data = super(
NukePlaceholderCreatePlugin, self
)._parse_placeholder_node_data(node)
node_knobs = node.knobs()
nb_children = 0
if "nb_children" in node_knobs:
nb_children = int(node_knobs["nb_children"].getValue())
placeholder_data["nb_children"] = nb_children
siblings = []
if "siblings" in node_knobs:
siblings = node_knobs["siblings"].values()
placeholder_data["siblings"] = siblings
node_full_name = node.fullName()
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
placeholder_data["last_loaded"] = []
placeholder_data["delete"] = False
return placeholder_data
def _before_instance_create(self, placeholder):
placeholder.data["nodes_init"] = nuke.allNodes()
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, node in scene_placeholders.items():
plugin_identifier_knob = node.knob("plugin_identifier")
if (
plugin_identifier_knob is None
or plugin_identifier_knob.getValue() != self.identifier
):
continue
placeholder_data = self._parse_placeholder_node_data(node)
output.append(
CreatePlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def get_placeholder_options(self, options=None):
return self.get_create_plugin_options(options)
def cleanup_placeholder(self, placeholder, failed):
# deselect all selected nodes
placeholder_node = nuke.toNode(placeholder.scene_identifier)
# getting the latest nodes added
nodes_init = placeholder.data["nodes_init"]
nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
self.log.debug("Created nodes: {}".format(nodes_created))
if not nodes_created:
return
placeholder.data["delete"] = True
nodes_created = self._move_to_placeholder_group(
placeholder, nodes_created
)
placeholder.data["last_created"] = nodes_created
refresh_nodes(nodes_created)
# positioning of the created nodes
min_x, min_y, _, _ = get_extreme_positions(nodes_created)
for node in nodes_created:
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
node.setXYpos(xpos, ypos)
refresh_nodes(nodes_created)
# fix the problem of z_order for backdrops
self._fix_z_order(placeholder)
self._imprint_siblings(placeholder)
if placeholder.data["nb_children"] == 0:
# save initial nodes postions and dimensions, update them
# and set inputs and outputs of created nodes
self._imprint_inits()
self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
self._set_created_connections(placeholder)
elif placeholder.data["siblings"]:
# create copies of placeholder siblings for the new created nodes,
# set their inputs and outpus and update all nodes positions and
# dimensions and siblings names
siblings = get_nodes_by_names(placeholder.data["siblings"])
refresh_nodes(siblings)
copies = self._create_sib_copies(placeholder)
new_nodes = list(copies.values()) # copies nodes
self._update_nodes(new_nodes, nodes_created)
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
new_nodes_name = get_names_from_nodes(new_nodes)
imprint(placeholder_node, {"siblings": new_nodes_name})
self._set_copies_connections(placeholder, copies)
self._update_nodes(
nuke.allNodes(),
new_nodes + nodes_created,
20
)
new_siblings = get_names_from_nodes(new_nodes)
placeholder.data["siblings"] = new_siblings
else:
# if the placeholder doesn't have siblings, the created
# nodes will be placed in a free space
xpointer, ypointer = find_free_space_to_paste_nodes(
nodes_created, direction="bottom", offset=200
)
node = nuke.createNode("NoOp")
reset_selection()
nuke.delete(node)
for node in nodes_created:
xpos = (node.xpos() - min_x) + xpointer
ypos = (node.ypos() - min_y) + ypointer
node.setXYpos(xpos, ypos)
placeholder.data["nb_children"] += 1
reset_selection()
# remove placeholders marked as delete
if (
placeholder.data.get("delete")
and not placeholder.data.get("keep_placeholder")
):
self.log.debug("Deleting node: {}".format(placeholder_node.name()))
nuke.delete(placeholder_node)
# go back to root group
nuke.root().begin()
def _move_to_placeholder_group(self, placeholder, nodes_created):
"""
opening the placeholder's group and copying created nodes in it.
Returns :
nodes_created (list): the new list of pasted nodes
"""
groups_name = placeholder.data["group_name"]
reset_selection()
select_nodes(nodes_created)
if groups_name:
with node_tempfile() as filepath:
nuke.nodeCopy(filepath)
for node in nuke.selectedNodes():
nuke.delete(node)
group = nuke.toNode(groups_name)
group.begin()
nuke.nodePaste(filepath)
nodes_created = nuke.selectedNodes()
return nodes_created
def _fix_z_order(self, placeholder):
"""Fix the problem of z_order when a backdrop is create."""
nodes_created = placeholder.data["last_created"]
created_backdrops = []
bd_orders = set()
for node in nodes_created:
if isinstance(node, nuke.BackdropNode):
created_backdrops.append(node)
bd_orders.add(node.knob("z_order").getValue())
if not bd_orders:
return
sib_orders = set()
for node_name in placeholder.data["siblings"]:
node = nuke.toNode(node_name)
if isinstance(node, nuke.BackdropNode):
sib_orders.add(node.knob("z_order").getValue())
if not sib_orders:
return
min_order = min(bd_orders)
max_order = max(sib_orders)
for backdrop_node in created_backdrops:
z_order = backdrop_node.knob("z_order").getValue()
backdrop_node.knob("z_order").setValue(
z_order + max_order - min_order + 1)
def _imprint_siblings(self, placeholder):
"""
- add siblings names to placeholder attributes (nodes created with it)
- add Id to the attributes of all the other nodes
"""
created_nodes = placeholder.data["last_created"]
created_nodes_set = set(created_nodes)
for node in created_nodes:
node_knobs = node.knobs()
if (
"is_placeholder" not in node_knobs
or (
"is_placeholder" in node_knobs
and node.knob("is_placeholder").value()
)
):
siblings = list(created_nodes_set - {node})
siblings_name = get_names_from_nodes(siblings)
siblings = {"siblings": siblings_name}
imprint(node, siblings)
def _imprint_inits(self):
"""Add initial positions and dimensions to the attributes"""
for node in nuke.allNodes():
refresh_node(node)
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
node.knob("x_init").setVisible(False)
node.knob("y_init").setVisible(False)
width = node.screenWidth()
height = node.screenHeight()
if "bdwidth" in node.knobs():
imprint(node, {"w_init": width, "h_init": height})
node.knob("w_init").setVisible(False)
node.knob("h_init").setVisible(False)
refresh_node(node)
def _update_nodes(
self, placeholder, nodes, considered_nodes, offset_y=None
):
"""Adjust backdrop nodes dimensions and positions.
Considering some nodes sizes.
Args:
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset (int): distance between copies
"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
diff_x = diff_y = 0
contained_nodes = [] # for backdrops
if offset_y is None:
width_ph = placeholder_node.screenWidth()
height_ph = placeholder_node.screenHeight()
diff_y = max_y - min_y - height_ph
diff_x = max_x - min_x - width_ph
contained_nodes = [placeholder_node]
min_x = placeholder_node.xpos()
min_y = placeholder_node.ypos()
else:
siblings = get_nodes_by_names(placeholder.data["siblings"])
minX, _, maxX, _ = get_extreme_positions(siblings)
diff_y = max_y - min_y + 20
diff_x = abs(max_x - min_x - maxX + minX)
contained_nodes = considered_nodes
if diff_y <= 0 and diff_x <= 0:
return
for node in nodes:
refresh_node(node)
if (
node == placeholder_node
or node in considered_nodes
):
continue
if (
not isinstance(node, nuke.BackdropNode)
or (
isinstance(node, nuke.BackdropNode)
and not set(contained_nodes) <= set(node.getNodes())
)
):
if offset_y is None and node.xpos() >= min_x:
node.setXpos(node.xpos() + diff_x)
if node.ypos() >= min_y:
node.setYpos(node.ypos() + diff_y)
else:
width = node.screenWidth()
height = node.screenHeight()
node.knob("bdwidth").setValue(width + diff_x)
node.knob("bdheight").setValue(height + diff_y)
refresh_node(node)
def _set_created_connections(self, placeholder):
"""
set inputs and outputs of created nodes"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
input_node, output_node = get_group_io_nodes(
placeholder.data["last_created"]
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
""" creating copies of the palce_holder siblings (the ones who were
created with it) for the new nodes added
Returns :
copies (dict) : with copied nodes names and their copies
"""
copies = {}
siblings = get_nodes_by_names(placeholder.data["siblings"])
for node in siblings:
new_node = duplicate_node(node)
x_init = int(new_node.knob("x_init").getValue())
y_init = int(new_node.knob("y_init").getValue())
new_node.setXYpos(x_init, y_init)
if isinstance(new_node, nuke.BackdropNode):
w_init = new_node.knob("w_init").getValue()
h_init = new_node.knob("h_init").getValue()
new_node.knob("bdwidth").setValue(w_init)
new_node.knob("bdheight").setValue(h_init)
refresh_node(node)
if "repre_id" in node.knobs().keys():
node.removeKnob(node.knob("repre_id"))
copies[node.name()] = new_node
return copies
def _set_copies_connections(self, placeholder, copies):
"""Set inputs and outputs of the copies.
Args:
copies (dict): Copied nodes by their names.
"""
last_input, last_output = get_group_io_nodes(
placeholder.data["last_created"]
)
siblings = get_nodes_by_names(placeholder.data["siblings"])
siblings_input, siblings_output = get_group_io_nodes(siblings)
copy_input = copies[siblings_input.name()]
copy_output = copies[siblings_output.name()]
for node_init in siblings:
if node_init == siblings_output:
continue
node_copy = copies[node_init.name()]
for node in node_init.dependent():
for idx in range(node.inputs()):
if node.input(idx) != node_init:
continue
if node in siblings:
copies[node.name()].setInput(idx, node_copy)
else:
last_input.setInput(0, node_copy)
for node in node_init.dependencies():
for idx in range(node_init.inputs()):
if node_init.input(idx) != node:
continue
if node_init == siblings_input:
copy_input.setInput(idx, node)
elif node in siblings:
node_copy.setInput(idx, copies[node.name()])
else:
node_copy.setInput(idx, last_output)
siblings_input.setInput(0, copy_output)
def build_workfile_template(*args):
builder = NukeTemplateBuilder(registered_host())
builder.build_template()
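Note: with the new NukePlaceholderCreatePlugin registered, the template build can be triggered from the menu entries added above or directly from Nuke's Script Editor. A hedged usage sketch (the exact package path of the Nuke host API is assumed here):

    # Assumes the OpenPype Nuke host is installed and a workfile template is
    # configured for the current project/task.
    from openpype.hosts.nuke.api.workfile_template_builder import (
        build_workfile_template,
        update_workfile_template,
    )

    build_workfile_template()   # populate placeholders in the current script
    update_workfile_template()  # re-run population on an already built script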
@@ -28,7 +28,7 @@ class LoadBackdropNodes(load.LoaderPlugin):
representations = ["nk"]
families = ["workfile", "nukenodes"]
label = "Iport Nuke Nodes"
label = "Import Nuke Nodes"
order = 0
icon = "eye"
color = "white"
@@ -0,0 +1,2 @@
[/Script/OpenPype.OpenPypeSettings]
FolderColor=(R=91,G=197,B=220,A=255)

@@ -42,6 +42,7 @@ public class OpenPype : ModuleRules
"Engine",
"Slate",
"SlateCore",
"AssetTools"
// ... add private dependencies that you statically link with here ...
}
);

@@ -1,6 +1,11 @@
#include "OpenPype.h"
#include "ISettingsContainer.h"
#include "ISettingsModule.h"
#include "ISettingsSection.h"
#include "LevelEditor.h"
#include "OpenPypePythonBridge.h"
#include "OpenPypeSettings.h"
#include "OpenPypeStyle.h"

@@ -11,13 +16,12 @@ static const FName OpenPypeTabName("OpenPype");
// This function is triggered when the plugin is staring up
void FOpenPypeModule::StartupModule()
{
FOpenPypeStyle::Initialize();
FOpenPypeStyle::SetIcon("Logo", "openpype40");
// Create the Extender that will add content to the menu
FLevelEditorModule& LevelEditorModule = FModuleManager::LoadModuleChecked<FLevelEditorModule>("LevelEditor");
TSharedPtr<FExtender> MenuExtender = MakeShareable(new FExtender());
TSharedPtr<FExtender> ToolbarExtender = MakeShareable(new FExtender());

@@ -37,6 +41,7 @@ void FOpenPypeModule::StartupModule()
LevelEditorModule.GetMenuExtensibilityManager()->AddExtender(MenuExtender);
LevelEditorModule.GetToolBarExtensibilityManager()->AddExtender(ToolbarExtender);
RegisterSettings();
}
void FOpenPypeModule::ShutdownModule()

@@ -64,7 +69,6 @@ void FOpenPypeModule::AddMenuEntry(FMenuBuilder& MenuBuilder)
FSlateIcon(FOpenPypeStyle::GetStyleSetName(), "OpenPype.Logo"),
FUIAction(FExecuteAction::CreateRaw(this, &FOpenPypeModule::MenuDialog))
);
}
MenuBuilder.EndSection();
}

@@ -89,13 +93,58 @@ void FOpenPypeModule::AddToobarEntry(FToolBarBuilder& ToolbarBuilder)
ToolbarBuilder.EndSection();
}
void FOpenPypeModule::RegisterSettings()
{
ISettingsModule& SettingsModule = FModuleManager::LoadModuleChecked<ISettingsModule>("Settings");
void FOpenPypeModule::MenuPopup() {
// Create the new category
// TODO: After the movement of the plugin from the game to editor, it might be necessary to move this!
ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project");
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
// Register the settings
ISettingsSectionPtr SettingsSection = SettingsModule.RegisterSettings("Project", "OpenPype", "General",
LOCTEXT("RuntimeGeneralSettingsName",
"General"),
LOCTEXT("RuntimeGeneralSettingsDescription",
"Base configuration for Open Pype Module"),
Settings
);
// Register the save handler to your settings, you might want to use it to
// validate those or just act to settings changes.
if (SettingsSection.IsValid())
{
SettingsSection->OnModified().BindRaw(this, &FOpenPypeModule::HandleSettingsSaved);
}
}
bool FOpenPypeModule::HandleSettingsSaved()
{
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
bool ResaveSettings = false;
// You can put any validation code in here and resave the settings in case an invalid
// value has been entered
if (ResaveSettings)
{
Settings->SaveConfig();
}
return true;
}
void FOpenPypeModule::MenuPopup()
{
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
bridge->RunInPython_Popup();
}
void FOpenPypeModule::MenuDialog() {
void FOpenPypeModule::MenuDialog()
{
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
bridge->RunInPython_Dialog();
}
@@ -1,4 +1,6 @@
#include "OpenPypeLib.h"
#include "AssetViewUtils.h"
#include "Misc/Paths.h"
#include "Misc/ConfigCacheIni.h"
#include "UObject/UnrealType.h"

@@ -10,21 +12,23 @@
* @warning This color will appear only after Editor restart. Is there a better way?
*/
void UOpenPypeLib::CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd)
bool UOpenPypeLib::SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd)
{
auto SaveColorInternal = [](FString InPath, FLinearColor InFolderColor)
if (AssetViewUtils::DoesFolderExist(FolderPath))
{
// Saves the color of the folder to the config
if (FPaths::FileExists(GEditorPerProjectIni))
{
GConfig->SetString(TEXT("PathColor"), *InPath, *InFolderColor.ToString(), GEditorPerProjectIni);
}
const TSharedPtr<FLinearColor> LinearColor = MakeShared<FLinearColor>(FolderColor);
};
SaveColorInternal(FolderPath, FolderColor);
AssetViewUtils::SaveColor(FolderPath, LinearColor, true);
UE_LOG(LogAssetData, Display, TEXT("A color {%s} has been set to folder \"%s\""), *LinearColor->ToString(),
*FolderPath)
return true;
}
UE_LOG(LogAssetData, Display, TEXT("Setting a color {%s} to folder \"%s\" has failed! Directory doesn't exist!"),
*FolderColor.ToString(), *FolderPath)
return false;
}
/**
* Returns all poperties on given object
* @param cls - class
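Note: because SetFolderColor is BlueprintCallable and now returns a bool, it can also be driven from the editor's scripting environments. A hypothetical call from the Unreal Editor Python console (the reflected class and function names are assumptions based on Unreal's usual UObject-to-Python naming):

    import unreal

    # Hypothetical reflected name for UOpenPypeLib::SetFolderColor.
    ok = unreal.OpenPypeLib.set_folder_color(
        "/Game/OpenPype", unreal.LinearColor(0.36, 0.77, 0.86, 1.0), False
    )
    print(ok)  # True if the folder existed and the colour was saved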
@@ -3,6 +3,8 @@
#include "OpenPypePublishInstance.h"
#include "AssetRegistryModule.h"
#include "NotificationManager.h"
#include "OpenPypeLib.h"
#include "OpenPypeSettings.h"
#include "SNotificationList.h"
//Moves all the invalid pointers to the end to prepare them for the shrinking

@@ -36,6 +38,11 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated);
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved);
AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated);
#ifdef WITH_EDITOR
ColorOpenPypeDirs();
#endif
}
void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)

@@ -58,7 +65,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
if (AssetDataInternal.Emplace(Asset).IsValidId())
{
UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"),
*this->GetName(), *Asset->GetName());
*this->GetName(), *Asset->GetName());
}
}
}

@@ -96,6 +103,48 @@ bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const
#ifdef WITH_EDITOR
void UOpenPypePublishInstance::ColorOpenPypeDirs()
{
FString PathName = this->GetPathName();
//Check whether the path contains the defined OpenPype folder
if (!PathName.Contains(TEXT("OpenPype"))) return;
//Get the base path for open pype
FString PathLeft, PathRight;
PathName.Split(FString("OpenPype"), &PathLeft, &PathRight);
if (PathLeft.IsEmpty() || PathRight.IsEmpty())
{
UE_LOG(LogAssetData, Error, TEXT("Failed to retrieve the base OpenPype directory!"))
return;
}
PathName.RemoveFromEnd(PathRight, ESearchCase::CaseSensitive);
//Get the current settings
const UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
//Color the base folder
UOpenPypeLib::SetFolderColor(PathName, Settings->GetFolderFColor(), false);
//Get Sub paths, iterate through them and color them according to the folder color in UOpenPypeSettings
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(
"AssetRegistry");
TArray<FString> PathList;
AssetRegistryModule.Get().GetSubPaths(PathName, PathList, true);
if (PathList.Num() > 0)
{
for (const FString& Path : PathList)
{
UOpenPypeLib::SetFolderColor(Path, Settings->GetFolderFColor(), false);
}
}
}
void UOpenPypePublishInstance::SendNotification(const FString& Text) const
{
FNotificationInfo Info{FText::FromString(Text)};
@@ -0,0 +1,21 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "OpenPypeSettings.h"
#include "IPluginManager.h"
#include "UObjectGlobals.h"
/**
 * Mainly is used for initializing default values if the DefaultOpenPypeSettings.ini file does not exist in the saved config
 */
UOpenPypeSettings::UOpenPypeSettings(const FObjectInitializer& ObjectInitializer)
{
const FString ConfigFilePath = OPENPYPE_SETTINGS_FILEPATH;
// This has to be probably in the future set using the UE Reflection system
FColor Color;
GConfig->GetColor(TEXT("/Script/OpenPype.OpenPypeSettings"), TEXT("FolderColor"), Color, ConfigFilePath);
FolderColor = Color;
}

@@ -12,10 +12,11 @@ public:
virtual void ShutdownModule() override;
private:
void RegisterSettings();
bool HandleSettingsSaved();
void AddMenuEntry(FMenuBuilder& MenuBuilder);
void AddToobarEntry(FToolBarBuilder& ToolbarBuilder);
void MenuPopup();
void MenuDialog();
};
@@ -5,14 +5,14 @@
UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypeLib : public UObject
class OPENPYPE_API UOpenPypeLib : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
public:
UFUNCTION(BlueprintCallable, Category = Python)
static void CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd);
static bool SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor,const bool& bForceAdd);
UFUNCTION(BlueprintCallable, Category = Python)
static TArray<FString> GetAllProperties(UClass* cls);

@@ -8,10 +8,8 @@ UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset
{
GENERATED_UCLASS_BODY()
public:
/**
/**
 * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is
 * placed in)

@@ -58,8 +56,10 @@ public:
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetAllAssets() const
{
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal;
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets
? AssetDataInternal.Union(AssetDataExternal)
: AssetDataInternal;
//Create a new TSet only with raw pointers.
TSet<UObject*> ResultSet;

@@ -69,12 +69,10 @@ public:
return ResultSet;
}
private:
UPROPERTY(VisibleAnywhere, Category="Assets")
TSet<TSoftObjectPtr<UObject>> AssetDataInternal;
/**
 * This property allows exposing the array to include other assets from any other directory than what it's currently
 * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added!

@@ -93,11 +91,11 @@ private:
bool IsUnderSameDir(const UObject* InAsset) const;
#ifdef WITH_EDITOR
void ColorOpenPypeDirs();
void SendNotification(const FString& Text) const;
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
#endif
};
@@ -0,0 +1,32 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "Object.h"
#include "OpenPypeSettings.generated.h"
#define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini")
UCLASS(Config=OpenPypeSettings, DefaultConfig)
class OPENPYPE_API UOpenPypeSettings : public UObject
{
GENERATED_UCLASS_BODY()
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
FColor GetFolderFColor() const
{
return FolderColor;
}
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
FLinearColor GetFolderFLinearColor() const
{
return FLinearColor(FolderColor);
}
protected:
UPROPERTY(config, EditAnywhere, Category = Folders)
FColor FolderColor = FColor(25,45,223);
};

@@ -0,0 +1,2 @@
[/Script/OpenPype.OpenPypeSettings]
FolderColor=(R=91,G=197,B=220,A=255)

@@ -48,6 +48,7 @@ public class OpenPype : ModuleRules
"Engine",
"Slate",
"SlateCore",
"AssetTools"
// ... add private dependencies that you statically link with here ...
}
);
@ -1,8 +1,12 @@
|
|||
#include "OpenPype.h"
|
||||
|
||||
#include "ISettingsContainer.h"
|
||||
#include "ISettingsModule.h"
|
||||
#include "ISettingsSection.h"
|
||||
#include "OpenPypeStyle.h"
|
||||
#include "OpenPypeCommands.h"
|
||||
#include "OpenPypePythonBridge.h"
|
||||
#include "LevelEditor.h"
|
||||
#include "OpenPypeSettings.h"
|
||||
#include "Misc/MessageDialog.h"
|
||||
#include "ToolMenus.h"
|
||||
|
||||
|
|
@ -29,7 +33,10 @@ void FOpenPypeModule::StartupModule()
|
|||
FExecuteAction::CreateRaw(this, &FOpenPypeModule::MenuDialog),
|
||||
FCanExecuteAction());
|
||||
|
||||
UToolMenus::RegisterStartupCallback(FSimpleMulticastDelegate::FDelegate::CreateRaw(this, &FOpenPypeModule::RegisterMenus));
|
||||
UToolMenus::RegisterStartupCallback(
|
||||
FSimpleMulticastDelegate::FDelegate::CreateRaw(this, &FOpenPypeModule::RegisterMenus));
|
||||
|
||||
RegisterSettings();
|
||||
}
|
||||
|
||||
void FOpenPypeModule::ShutdownModule()
|
||||
|
|
@ -43,6 +50,50 @@ void FOpenPypeModule::ShutdownModule()
|
|||
FOpenPypeCommands::Unregister();
|
||||
}
|
||||
|
||||
|
||||
void FOpenPypeModule::RegisterSettings()
|
||||
{
|
||||
ISettingsModule& SettingsModule = FModuleManager::LoadModuleChecked<ISettingsModule>("Settings");
|
||||
|
||||
// Create the new category
|
||||
// TODO: After the movement of the plugin from the game to editor, it might be necessary to move this!
|
||||
ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project");
|
||||
|
||||
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
|
||||
|
||||
// Register the settings
|
||||
ISettingsSectionPtr SettingsSection = SettingsModule.RegisterSettings("Project", "OpenPype", "General",
|
||||
LOCTEXT("RuntimeGeneralSettingsName",
|
||||
"General"),
|
||||
LOCTEXT("RuntimeGeneralSettingsDescription",
|
||||
"Base configuration for Open Pype Module"),
|
||||
Settings
|
||||
);
|
||||
|
||||
// Register the save handler to your settings, you might want to use it to
|
||||
// validate those or just act to settings changes.
|
||||
if (SettingsSection.IsValid())
|
||||
{
|
||||
SettingsSection->OnModified().BindRaw(this, &FOpenPypeModule::HandleSettingsSaved);
|
||||
}
|
||||
}
|
||||
|
||||
bool FOpenPypeModule::HandleSettingsSaved()
|
||||
{
|
||||
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
|
||||
bool ResaveSettings = false;
|
||||
|
||||
// You can put any validation code in here and resave the settings in case an invalid
|
||||
// value has been entered
|
||||
|
||||
if (ResaveSettings)
|
||||
{
|
||||
Settings->SaveConfig();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void FOpenPypeModule::RegisterMenus()
|
||||
{
|
||||
// Owner will be used for cleanup in call to UToolMenus::UnregisterOwner
|
||||
|
|
@@ -64,7 +115,8 @@ void FOpenPypeModule::RegisterMenus()
|
|||
{
|
||||
FToolMenuSection& Section = ToolbarMenu->FindOrAddSection("PluginTools");
|
||||
{
|
||||
FToolMenuEntry& Entry = Section.AddEntry(FToolMenuEntry::InitToolBarButton(FOpenPypeCommands::Get().OpenPypeTools));
|
||||
FToolMenuEntry& Entry = Section.AddEntry(
|
||||
FToolMenuEntry::InitToolBarButton(FOpenPypeCommands::Get().OpenPypeTools));
|
||||
Entry.SetCommandList(PluginCommands);
|
||||
}
|
||||
}
|
||||
|
|
@@ -72,12 +124,14 @@ void FOpenPypeModule::RegisterMenus()
|
|||
}
|
||||
|
||||
|
||||
void FOpenPypeModule::MenuPopup() {
|
||||
void FOpenPypeModule::MenuPopup()
|
||||
{
|
||||
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
|
||||
bridge->RunInPython_Popup();
|
||||
}
|
||||
|
||||
void FOpenPypeModule::MenuDialog() {
|
||||
void FOpenPypeModule::MenuDialog()
|
||||
{
|
||||
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
|
||||
bridge->RunInPython_Dialog();
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,4 +1,6 @@
|
|||
#include "OpenPypeLib.h"
|
||||
|
||||
#include "AssetViewUtils.h"
|
||||
#include "Misc/Paths.h"
|
||||
#include "Misc/ConfigCacheIni.h"
|
||||
#include "UObject/UnrealType.h"
|
||||
|
|
@@ -10,21 +12,23 @@
|
|||
* @warning This color will appear only after Editor restart. Is there a better way?
|
||||
*/
|
||||
|
||||
void UOpenPypeLib::CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd)
|
||||
bool UOpenPypeLib::SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd)
|
||||
{
|
||||
auto SaveColorInternal = [](FString InPath, FLinearColor InFolderColor)
|
||||
if (AssetViewUtils::DoesFolderExist(FolderPath))
|
||||
{
|
||||
// Saves the color of the folder to the config
|
||||
if (FPaths::FileExists(GEditorPerProjectIni))
|
||||
{
|
||||
GConfig->SetString(TEXT("PathColor"), *InPath, *InFolderColor.ToString(), GEditorPerProjectIni);
|
||||
}
|
||||
const TSharedPtr<FLinearColor> LinearColor = MakeShared<FLinearColor>(FolderColor);
|
||||
|
||||
};
|
||||
|
||||
SaveColorInternal(FolderPath, FolderColor);
|
||||
AssetViewUtils::SaveColor(FolderPath, LinearColor, true);
|
||||
UE_LOG(LogAssetData, Display, TEXT("A color {%s} has been set to folder \"%s\""), *LinearColor->ToString(),
|
||||
*FolderPath)
|
||||
return true;
|
||||
}
|
||||
|
||||
UE_LOG(LogAssetData, Display, TEXT("Setting a color {%s} to folder \"%s\" has failed! Directory doesn't exist!"),
|
||||
*FolderColor.ToString(), *FolderPath)
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all properties on given object
|
||||
* @param cls - class
|
||||
|
|
|
|||
|
|
@@ -4,8 +4,11 @@
|
|||
#include "AssetRegistry/AssetRegistryModule.h"
|
||||
#include "AssetToolsModule.h"
|
||||
#include "Framework/Notifications/NotificationManager.h"
|
||||
#include "OpenPypeLib.h"
|
||||
#include "OpenPypeSettings.h"
|
||||
#include "Widgets/Notifications/SNotificationList.h"
|
||||
|
||||
|
||||
//Moves all the invalid pointers to the end to prepare them for the shrinking
|
||||
#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \
|
||||
VAR.Shrink();
|
||||
|
|
@@ -16,8 +19,11 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj
|
|||
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<
|
||||
FAssetRegistryModule>("AssetRegistry");
|
||||
|
||||
const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>(
|
||||
"PropertyEditor");
|
||||
|
||||
FString Left, Right;
|
||||
GetPathName().Split(GetName(), &Left, &Right);
|
||||
GetPathName().Split("/" + GetName(), &Left, &Right);
|
||||
|
||||
FARFilter Filter;
|
||||
Filter.PackagePaths.Emplace(FName(Left));
|
||||
|
|
@@ -34,15 +40,17 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj
|
|||
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated);
|
||||
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved);
|
||||
AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated);
|
||||
|
||||
|
||||
|
||||
#ifdef WITH_EDITOR
|
||||
ColorOpenPypeDirs();
|
||||
#endif
|
||||
}
|
||||
|
||||
void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
|
||||
{
|
||||
TArray<FString> split;
|
||||
|
||||
const TObjectPtr<UObject> Asset = InAssetData.GetAsset();
|
||||
UObject* Asset = InAssetData.GetAsset();
|
||||
|
||||
if (!IsValid(Asset))
|
||||
{
|
||||
|
|
@@ -58,7 +66,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
|
|||
if (AssetDataInternal.Emplace(Asset).IsValidId())
|
||||
{
|
||||
UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"),
|
||||
*this->GetName(), *Asset->GetName());
|
||||
*this->GetName(), *Asset->GetName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -86,7 +94,7 @@ void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData)
|
|||
REMOVE_INVALID_ENTRIES(AssetDataExternal);
|
||||
}
|
||||
|
||||
bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr<UObject>& InAsset) const
|
||||
bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const
|
||||
{
|
||||
FString ThisLeft, ThisRight;
|
||||
this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight);
|
||||
|
|
@@ -96,6 +104,48 @@ bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr<UObject>& InAsset
|
|||
|
||||
#ifdef WITH_EDITOR
|
||||
|
||||
void UOpenPypePublishInstance::ColorOpenPypeDirs()
|
||||
{
|
||||
FString PathName = this->GetPathName();
|
||||
|
||||
//Check whether the path contains the defined OpenPype folder
|
||||
if (!PathName.Contains(TEXT("OpenPype"))) return;
|
||||
|
||||
//Get the base path for open pype
|
||||
FString PathLeft, PathRight;
|
||||
PathName.Split(FString("OpenPype"), &PathLeft, &PathRight);
|
||||
|
||||
if (PathLeft.IsEmpty() || PathRight.IsEmpty())
|
||||
{
|
||||
UE_LOG(LogAssetData, Error, TEXT("Failed to retrieve the base OpenPype directory!"))
|
||||
return;
|
||||
}
|
||||
|
||||
PathName.RemoveFromEnd(PathRight, ESearchCase::CaseSensitive);
|
||||
|
||||
//Get the current settings
|
||||
const UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
|
||||
|
||||
//Color the base folder
|
||||
UOpenPypeLib::SetFolderColor(PathName, Settings->GetFolderFColor(), false);
|
||||
|
||||
//Get Sub paths, iterate through them and color them according to the folder color in UOpenPypeSettings
|
||||
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(
|
||||
"AssetRegistry");
|
||||
|
||||
TArray<FString> PathList;
|
||||
|
||||
AssetRegistryModule.Get().GetSubPaths(PathName, PathList, true);
|
||||
|
||||
if (PathList.Num() > 0)
|
||||
{
|
||||
for (const FString& Path : PathList)
|
||||
{
|
||||
UOpenPypeLib::SetFolderColor(Path, Settings->GetFolderFColor(), false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void UOpenPypePublishInstance::SendNotification(const FString& Text) const
|
||||
{
|
||||
FNotificationInfo Info{FText::FromString(Text)};
|
||||
|
|
@@ -125,16 +175,15 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro
|
|||
PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED(
|
||||
UOpenPypePublishInstance, AssetDataExternal))
|
||||
{
|
||||
|
||||
// Check for duplicated assets
|
||||
for (const auto& Asset : AssetDataInternal)
|
||||
{
|
||||
if (AssetDataExternal.Contains(Asset))
|
||||
{
|
||||
AssetDataExternal.Remove(Asset);
|
||||
return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!");
|
||||
return SendNotification(
|
||||
"You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Check if no UOpenPypePublishInstance type assets are included
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,21 @@
|
|||
// Fill out your copyright notice in the Description page of Project Settings.
|
||||
|
||||
#include "OpenPypeSettings.h"
|
||||
|
||||
#include "Interfaces/IPluginManager.h"
|
||||
#include "UObject/UObjectGlobals.h"
|
||||
|
||||
/**
|
||||
* Mainly is used for initializing default values if the DefaultOpenPypeSettings.ini file does not exist in the saved config
|
||||
*/
|
||||
UOpenPypeSettings::UOpenPypeSettings(const FObjectInitializer& ObjectInitializer)
|
||||
{
|
||||
|
||||
const FString ConfigFilePath = OPENPYPE_SETTINGS_FILEPATH;
|
||||
|
||||
// This has to be probably in the future set using the UE Reflection system
|
||||
FColor Color;
|
||||
GConfig->GetColor(TEXT("/Script/OpenPype.OpenPypeSettings"), TEXT("FolderColor"), Color, ConfigFilePath);
|
||||
|
||||
FolderColor = Color;
|
||||
}
|
||||
|
|
@@ -14,6 +14,8 @@ public:
|
|||
|
||||
private:
|
||||
void RegisterMenus();
|
||||
void RegisterSettings();
|
||||
bool HandleSettingsSaved();
|
||||
|
||||
void MenuPopup();
|
||||
void MenuDialog();
|
||||
|
|
|
|||
|
|
@@ -5,14 +5,14 @@
|
|||
|
||||
|
||||
UCLASS(Blueprintable)
|
||||
class OPENPYPE_API UOpenPypeLib : public UObject
|
||||
class OPENPYPE_API UOpenPypeLib : public UBlueprintFunctionLibrary
|
||||
{
|
||||
|
||||
GENERATED_BODY()
|
||||
|
||||
public:
|
||||
UFUNCTION(BlueprintCallable, Category = Python)
|
||||
static void CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd);
|
||||
static bool SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor,const bool& bForceAdd);
|
||||
|
||||
UFUNCTION(BlueprintCallable, Category = Python)
|
||||
static TArray<FString> GetAllProperties(UClass* cls);
|
||||
|
|
|
|||
|
|
@@ -8,7 +8,9 @@ UCLASS(Blueprintable)
|
|||
class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset
|
||||
{
|
||||
GENERATED_UCLASS_BODY()
|
||||
|
||||
public:
|
||||
/**
|
||||
/**
|
||||
* Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is
|
||||
* placed in)
|
||||
|
|
@@ -55,8 +57,10 @@ public:
|
|||
UFUNCTION(BlueprintCallable, BlueprintPure)
|
||||
TSet<UObject*> GetAllAssets() const
|
||||
{
|
||||
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal;
|
||||
|
||||
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets
|
||||
? AssetDataInternal.Union(AssetDataExternal)
|
||||
: AssetDataInternal;
|
||||
|
||||
//Create a new TSet only with raw pointers.
|
||||
TSet<UObject*> ResultSet;
|
||||
|
||||
|
|
@@ -71,24 +75,26 @@ private:
|
|||
TSet<TSoftObjectPtr<UObject>> AssetDataInternal;
|
||||
|
||||
/**
|
||||
* This property allows the instance to include other assets from any other directory than what it's currently
|
||||
* monitoring.
|
||||
* @attention assets have to be added manually! They are not automatically registered or added!
|
||||
* This property allows exposing the array to include other assets from any other directory than what it's currently
|
||||
* monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added!
|
||||
*/
|
||||
UPROPERTY(EditAnywhere, Category="Assets")
|
||||
UPROPERTY(EditAnywhere, Category = "Assets")
|
||||
bool bAddExternalAssets = false;
|
||||
|
||||
UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets"))
|
||||
UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets")
|
||||
TSet<TSoftObjectPtr<UObject>> AssetDataExternal;
|
||||
|
||||
|
||||
void OnAssetCreated(const FAssetData& InAssetData);
|
||||
void OnAssetRemoved(const FAssetData& InAssetData);
|
||||
void OnAssetUpdated(const FAssetData& InAssetData);
|
||||
|
||||
bool IsUnderSameDir(const TObjectPtr<UObject>& InAsset) const;
|
||||
bool IsUnderSameDir(const UObject* InAsset) const;
|
||||
|
||||
#ifdef WITH_EDITOR
|
||||
|
||||
void ColorOpenPypeDirs();
|
||||
|
||||
void SendNotification(const FString& Text) const;
|
||||
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
|
||||
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,32 @@
|
|||
// Fill out your copyright notice in the Description page of Project Settings.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "CoreMinimal.h"
|
||||
#include "UObject/Object.h"
|
||||
#include "OpenPypeSettings.generated.h"
|
||||
|
||||
#define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini")
|
||||
|
||||
UCLASS(Config=OpenPypeSettings, DefaultConfig)
|
||||
class OPENPYPE_API UOpenPypeSettings : public UObject
|
||||
{
|
||||
GENERATED_UCLASS_BODY()
|
||||
|
||||
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
|
||||
FColor GetFolderFColor() const
|
||||
{
|
||||
return FolderColor;
|
||||
}
|
||||
|
||||
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
|
||||
FLinearColor GetFolderFLinearColor() const
|
||||
{
|
||||
return FLinearColor(FolderColor);
|
||||
}
|
||||
|
||||
protected:
|
||||
|
||||
UPROPERTY(config, EditAnywhere, Category = Folders)
|
||||
FColor FolderColor = FColor(25,45,223);
|
||||
};
|
||||
|
|
@@ -3,6 +3,7 @@ import socket
import getpass

from openpype_modules.ftrack.lib import BaseAction
from openpype_modules.ftrack.ftrack_server.lib import get_host_ip


class ActionWhereIRun(BaseAction):

@@ -53,8 +54,7 @@ class ActionWhereIRun(BaseAction):
try:
host_name = socket.gethostname()
msgs["Hostname"] = host_name
host_ip = socket.gethostbyname(host_name)
msgs["IP"] = host_ip
msgs["IP"] = get_host_ip() or "N/A"
except Exception:
pass
@@ -26,6 +26,7 @@ from openpype_modules.ftrack import (
)
from openpype_modules.ftrack.lib import credentials
from openpype_modules.ftrack.ftrack_server import socket_thread
from openpype_modules.ftrack.ftrack_server.lib import get_host_ip


class MongoPermissionsError(Exception):

@@ -245,11 +246,13 @@ def main_loop(ftrack_url):
)

host_name = socket.gethostname()
host_ip = get_host_ip()

main_info = [
["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")],
["Username", getpass.getuser()],
["Host Name", host_name],
["Host IP", socket.gethostbyname(host_name)],
["Host IP", host_ip or "N/A"],
["OpenPype executable", get_openpype_execute_args()[-1]],
["OpenPype version", get_openpype_version() or "N/A"],
["OpenPype build version", get_build_version() or "N/A"]
@@ -9,8 +9,9 @@ import time
import queue
import collections
import appdirs
import pymongo
import socket

import pymongo
import requests
import ftrack_api
import ftrack_api.session

@@ -32,6 +33,16 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status"
TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result"


def get_host_ip():
host_name = socket.gethostname()
try:
return socket.gethostbyname(host_name)
except Exception:
pass

return None


class SocketBaseEventHub(ftrack_api.event.hub.EventHub):

hearbeat_msg = b"hearbeat"
@@ -15,7 +15,8 @@ from openpype_modules.ftrack.ftrack_server.lib import (
SocketSession,
StatusEventHub,
TOPIC_STATUS_SERVER,
TOPIC_STATUS_SERVER_RESULT
TOPIC_STATUS_SERVER_RESULT,
get_host_ip
)
from openpype.lib import (
Logger,

@@ -29,10 +30,10 @@ log = Logger.get_logger("Event storer")
action_identifier = (
"event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"]
)
host_ip = socket.gethostbyname(socket.gethostname())
host_ip = get_host_ip()
action_data = {
"label": "OpenPype Admin",
"variant": "- Event server Status ({})".format(host_ip),
"variant": "- Event server Status ({})".format(host_ip or "IP N/A"),
"description": "Get Information about event server",
"actionIdentifier": action_identifier
}
@@ -1,10 +1,12 @@
|
|||
import pyblish.api
|
||||
|
||||
from openpype.lib.profiles_filtering import filter_profiles
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.lib import attribute_definitions
|
||||
from openpype.pipeline import OpenPypePyblishPluginMixin
|
||||
|
||||
|
||||
class CollectSlackFamilies(pyblish.api.InstancePlugin):
|
||||
class CollectSlackFamilies(pyblish.api.InstancePlugin,
|
||||
OpenPypePyblishPluginMixin):
|
||||
"""Collect family for Slack notification
|
||||
|
||||
Expects configured profile in
|
||||
|
|
@@ -17,6 +19,18 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
|
|||
|
||||
profiles = None
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
return [
|
||||
attribute_definitions.TextDef(
|
||||
# Key under which it will be stored
|
||||
"additional_message",
|
||||
# Use plugin label as label for attribute
|
||||
label="Additional Slack message",
|
||||
placeholder="<Only if Slack is configured>"
|
||||
)
|
||||
]
|
||||
|
||||
def process(self, instance):
|
||||
task_data = instance.data["anatomyData"].get("task", {})
|
||||
family = self.main_family_from_instance(instance)
|
||||
|
|
@@ -55,6 +69,11 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
|
|||
["token"])
|
||||
instance.data["slack_token"] = slack_token
|
||||
|
||||
attribute_values = self.get_attr_values_from_data(instance.data)
|
||||
additional_message = attribute_values.get("additional_message")
|
||||
if additional_message:
|
||||
instance.data["slack_additional_message"] = additional_message
|
||||
|
||||
def main_family_from_instance(self, instance): # TODO yank from integrate
|
||||
"""Returns main family of entered instance."""
|
||||
family = instance.data.get("family")
|
||||
|
|
|
|||
|
|
@@ -1,8 +1,11 @@
|
|||
import os
|
||||
import re
|
||||
import six
|
||||
import pyblish.api
|
||||
import copy
|
||||
from datetime import datetime
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import time
|
||||
|
||||
from openpype.client import OpenPypeMongoConnection
|
||||
from openpype.lib.plugin_tools import prepare_template_data
|
||||
|
|
@@ -31,11 +34,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
review_path = self._get_review_path(instance)
|
||||
|
||||
publish_files = set()
|
||||
message = ''
|
||||
additional_message = instance.data.get("slack_additional_message")
|
||||
token = instance.data["slack_token"]
|
||||
if additional_message:
|
||||
message = "{} \n".format(additional_message)
|
||||
for message_profile in instance.data["slack_channel_message_profiles"]:
|
||||
message = self._get_filled_message(message_profile["message"],
|
||||
instance,
|
||||
review_path)
|
||||
self.log.debug("message:: {}".format(message))
|
||||
message += self._get_filled_message(message_profile["message"],
|
||||
instance,
|
||||
review_path)
|
||||
if not message:
|
||||
return
|
||||
|
||||
|
|
@@ -49,18 +56,16 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
project = instance.context.data["anatomyData"]["project"]["code"]
|
||||
for channel in message_profile["channels"]:
|
||||
if six.PY2:
|
||||
msg_id, file_ids = \
|
||||
self._python2_call(instance.data["slack_token"],
|
||||
channel,
|
||||
message,
|
||||
publish_files)
|
||||
client = SlackPython2Operations(token, self.log)
|
||||
else:
|
||||
msg_id, file_ids = \
|
||||
self._python3_call(instance.data["slack_token"],
|
||||
channel,
|
||||
message,
|
||||
publish_files)
|
||||
client = SlackPython3Operations(token, self.log)
|
||||
|
||||
users, groups = client.get_users_and_groups()
|
||||
message = self._translate_users(message, users, groups)
|
||||
|
||||
msg_id, file_ids = client.send_message(channel,
|
||||
message,
|
||||
publish_files)
|
||||
if not msg_id:
|
||||
return
|
||||
|
||||
|
|
@@ -132,14 +137,14 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
fill_key = "task[{}]".format(key)
|
||||
fill_pairs.append((fill_key, value))
|
||||
|
||||
self.log.debug("fill_pairs ::{}".format(fill_pairs))
|
||||
multiple_case_variants = prepare_template_data(fill_pairs)
|
||||
fill_data.update(multiple_case_variants)
|
||||
|
||||
message = None
|
||||
message = ''
|
||||
try:
|
||||
message = message_templ.format(**fill_data)
|
||||
message = self._escape_missing_keys(message_templ, fill_data).\
|
||||
format(**fill_data)
|
||||
except Exception:
|
||||
# shouldn't happen
|
||||
self.log.warning(
|
||||
"Some keys are missing in {}".format(message_templ),
|
||||
exc_info=True)
|
||||
|
|
@@ -162,27 +167,249 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
|
||||
def _get_review_path(self, instance):
|
||||
"""Returns abs url for review if present in instance repres"""
|
||||
published_path = None
|
||||
review_path = None
|
||||
for repre in instance.data.get("representations", []):
|
||||
tags = repre.get('tags', [])
|
||||
if (repre.get("review")
|
||||
or "review" in tags
|
||||
or "burnin" in tags):
|
||||
if os.path.exists(repre["published_path"]):
|
||||
published_path = repre["published_path"]
|
||||
repre_review_path = (
|
||||
repre.get("published_path") or
|
||||
os.path.join(repre["stagingDir"], repre["files"])
|
||||
)
|
||||
if os.path.exists(repre_review_path):
|
||||
review_path = repre_review_path
|
||||
if "burnin" in tags: # burnin has precedence if exists
|
||||
break
|
||||
return published_path
|
||||
return review_path
|
||||
|
||||
def _python2_call(self, token, channel, message, publish_files):
|
||||
from slackclient import SlackClient
|
||||
def _get_user_id(self, users, user_name):
|
||||
"""Returns internal slack id for user name"""
|
||||
user_id = None
|
||||
user_name_lower = user_name.lower()
|
||||
for user in users:
|
||||
if (not user.get("deleted") and
|
||||
(user_name_lower == user["name"].lower() or
|
||||
# bots dont have display_name
|
||||
user_name_lower == user["profile"].get("display_name",
|
||||
'').lower() or
|
||||
user_name_lower == user["profile"].get("real_name",
|
||||
'').lower())):
|
||||
user_id = user["id"]
|
||||
break
|
||||
return user_id
|
||||
|
||||
def _get_group_id(self, groups, group_name):
|
||||
"""Returns internal group id for string name"""
|
||||
group_id = None
|
||||
for group in groups:
|
||||
if (not group.get("date_delete") and
|
||||
(group_name.lower() == group["name"].lower() or
|
||||
group_name.lower() == group["handle"])):
|
||||
group_id = group["id"]
|
||||
break
|
||||
return group_id
|
||||
|
||||
def _translate_users(self, message, users, groups):
|
||||
"""Replace all occurences of @mentions with proper <@name> format."""
|
||||
matches = re.findall(r"(?<!<)@[^ ]+", message)
|
||||
in_quotes = re.findall(r"(?<!<)(['\"])(@[^'\"]+)", message)
|
||||
for item in in_quotes:
|
||||
matches.append(item[1])
|
||||
if not matches:
|
||||
return message
|
||||
|
||||
for orig_user in matches:
|
||||
user_name = orig_user.replace("@", '')
|
||||
slack_id = self._get_user_id(users, user_name)
|
||||
mention = None
|
||||
if slack_id:
|
||||
mention = "<@{}>".format(slack_id)
|
||||
else:
|
||||
slack_id = self._get_group_id(groups, user_name)
|
||||
if slack_id:
|
||||
mention = "<!subteam^{}>".format(slack_id)
|
||||
if mention:
|
||||
message = message.replace(orig_user, mention)
|
||||
|
||||
return message
|
||||
|
||||
def _escape_missing_keys(self, message, fill_data):
|
||||
"""Double escapes placeholder which are missing in 'fill_data'"""
|
||||
placeholder_keys = re.findall(r"\{([^}]+)\}", message)
|
||||
|
||||
fill_keys = []
|
||||
for key, value in fill_data.items():
|
||||
fill_keys.append(key)
|
||||
if isinstance(value, dict):
|
||||
for child_key in value.keys():
|
||||
fill_keys.append("{}[{}]".format(key, child_key))
|
||||
|
||||
not_matched = set(placeholder_keys) - set(fill_keys)
|
||||
|
||||
for not_matched_item in not_matched:
|
||||
message = message.replace("{}".format(not_matched_item),
|
||||
"{{{}}}".format(not_matched_item))
|
||||
|
||||
return message
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class AbstractSlackOperations:
|
||||
|
||||
@abstractmethod
|
||||
def _get_users_list(self):
|
||||
"""Return response with user list, different methods Python 2 vs 3"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def _get_usergroups_list(self):
|
||||
"""Return response with user list, different methods Python 2 vs 3"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def get_users_and_groups(self):
|
||||
"""Return users and groups, different retry in Python 2 vs 3"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def send_message(self, channel, message, publish_files):
|
||||
"""Sends message to channel, different methods in Python 2 vs 3"""
|
||||
pass
|
||||
|
||||
def _get_users(self):
|
||||
"""Parse users.list response into list of users (dicts)"""
|
||||
first = True
|
||||
next_page = None
|
||||
users = []
|
||||
while first or next_page:
|
||||
response = self._get_users_list()
|
||||
first = False
|
||||
next_page = response.get("response_metadata").get("next_cursor")
|
||||
for user in response.get("members"):
|
||||
users.append(user)
|
||||
|
||||
return users
|
||||
|
||||
def _get_groups(self):
|
||||
"""Parses usergroups.list response into list of groups (dicts)"""
|
||||
response = self._get_usergroups_list()
|
||||
groups = []
|
||||
for group in response.get("usergroups"):
|
||||
groups.append(group)
|
||||
return groups
|
||||
|
||||
def _enrich_error(self, error_str, channel):
|
||||
"""Enhance known errors with more helpful notations."""
|
||||
if 'not_in_channel' in error_str:
|
||||
# there is no file.write.public scope, app must be explicitly in
|
||||
# the channel
|
||||
msg = " - application must added to channel '{}'.".format(channel)
|
||||
error_str += msg + " Ask Slack admin."
|
||||
return error_str
|
||||
|
||||
|
||||
class SlackPython3Operations(AbstractSlackOperations):
|
||||
|
||||
def __init__(self, token, log):
|
||||
from slack_sdk import WebClient
|
||||
|
||||
self.client = WebClient(token=token)
|
||||
self.log = log
|
||||
|
||||
def _get_users_list(self):
|
||||
return self.client.users_list()
|
||||
|
||||
def _get_usergroups_list(self):
|
||||
return self.client.usergroups_list()
|
||||
|
||||
def get_users_and_groups(self):
|
||||
from slack_sdk.errors import SlackApiError
|
||||
while True:
|
||||
try:
|
||||
users = self._get_users()
|
||||
groups = self._get_groups()
|
||||
break
|
||||
except SlackApiError as e:
|
||||
retry_after = e.response.headers.get("Retry-After")
|
||||
if retry_after:
|
||||
print(
|
||||
"Rate limit hit, sleeping for {}".format(retry_after))
|
||||
time.sleep(int(retry_after))
|
||||
else:
|
||||
self.log.warning("Cannot pull user info, "
|
||||
"mentions won't work", exc_info=True)
|
||||
return [], []
|
||||
|
||||
return users, groups
|
||||
|
||||
def send_message(self, channel, message, publish_files):
|
||||
from slack_sdk.errors import SlackApiError
|
||||
try:
|
||||
attachment_str = "\n\n Attachment links: \n"
|
||||
file_ids = []
|
||||
for published_file in publish_files:
|
||||
response = self.client.files_upload(
|
||||
file=published_file,
|
||||
filename=os.path.basename(published_file))
|
||||
attachment_str += "\n<{}|{}>".format(
|
||||
response["file"]["permalink"],
|
||||
os.path.basename(published_file))
|
||||
file_ids.append(response["file"]["id"])
|
||||
|
||||
if publish_files:
|
||||
message += attachment_str
|
||||
|
||||
response = self.client.chat_postMessage(
|
||||
channel=channel,
|
||||
text=message
|
||||
)
|
||||
return response.data["ts"], file_ids
|
||||
except SlackApiError as e:
|
||||
# # You will get a SlackApiError if "ok" is False
|
||||
error_str = self._enrich_error(str(e.response["error"]), channel)
|
||||
self.log.warning("Error happened {}".format(error_str))
|
||||
except Exception as e:
|
||||
error_str = self._enrich_error(str(e), channel)
|
||||
self.log.warning("Not SlackAPI error", exc_info=True)
|
||||
|
||||
return None, []
|
||||
|
||||
|
||||
class SlackPython2Operations(AbstractSlackOperations):
|
||||
|
||||
def __init__(self, token, log):
|
||||
from slackclient import SlackClient
|
||||
|
||||
self.client = SlackClient(token=token)
|
||||
self.log = log
|
||||
|
||||
def _get_users_list(self):
|
||||
return self.client.api_call("users.list")
|
||||
|
||||
def _get_usergroups_list(self):
|
||||
return self.client.api_call("usergroups.list")
|
||||
|
||||
def get_users_and_groups(self):
|
||||
while True:
|
||||
try:
|
||||
users = self._get_users()
|
||||
groups = self._get_groups()
|
||||
break
|
||||
except Exception:
|
||||
self.log.warning("Cannot pull user info, "
|
||||
"mentions won't work", exc_info=True)
|
||||
return [], []
|
||||
|
||||
return users, groups
|
||||
|
||||
def send_message(self, channel, message, publish_files):
|
||||
try:
|
||||
client = SlackClient(token)
|
||||
attachment_str = "\n\n Attachment links: \n"
|
||||
file_ids = []
|
||||
for p_file in publish_files:
|
||||
with open(p_file, 'rb') as pf:
|
||||
response = client.api_call(
|
||||
response = self.client.api_call(
|
||||
"files.upload",
|
||||
file=pf,
|
||||
channel=channel,
|
||||
|
|
@@ -203,7 +430,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
if publish_files:
|
||||
message += attachment_str
|
||||
|
||||
response = client.api_call(
|
||||
response = self.client.api_call(
|
||||
"chat.postMessage",
|
||||
channel=channel,
|
||||
text=message
|
||||
|
|
@@ -220,46 +447,3 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
self.log.warning("Error happened: {}".format(error_str))
|
||||
|
||||
return None, []
|
||||
|
||||
def _python3_call(self, token, channel, message, publish_files):
|
||||
from slack_sdk import WebClient
|
||||
from slack_sdk.errors import SlackApiError
|
||||
try:
|
||||
client = WebClient(token=token)
|
||||
attachment_str = "\n\n Attachment links: \n"
|
||||
file_ids = []
|
||||
for published_file in publish_files:
|
||||
response = client.files_upload(
|
||||
file=published_file,
|
||||
filename=os.path.basename(published_file))
|
||||
attachment_str += "\n<{}|{}>".format(
|
||||
response["file"]["permalink"],
|
||||
os.path.basename(published_file))
|
||||
file_ids.append(response["file"]["id"])
|
||||
|
||||
if publish_files:
|
||||
message += attachment_str
|
||||
|
||||
response = client.chat_postMessage(
|
||||
channel=channel,
|
||||
text=message
|
||||
)
|
||||
return response.data["ts"], file_ids
|
||||
except SlackApiError as e:
|
||||
# You will get a SlackApiError if "ok" is False
|
||||
error_str = self._enrich_error(str(e.response["error"]), channel)
|
||||
self.log.warning("Error happened {}".format(error_str))
|
||||
except Exception as e:
|
||||
error_str = self._enrich_error(str(e), channel)
|
||||
self.log.warning("Not SlackAPI error", exc_info=True)
|
||||
|
||||
return None, []
|
||||
|
||||
def _enrich_error(self, error_str, channel):
|
||||
"""Enhance known errors with more helpful notations."""
|
||||
if 'not_in_channel' in error_str:
|
||||
# there is no file.write.public scope, app must be explicitly in
|
||||
# the channel
|
||||
msg = " - application must added to channel '{}'.".format(channel)
|
||||
error_str += msg + " Ask Slack admin."
|
||||
return error_str
|
||||
|
|
|
|||
|
|
@@ -22,7 +22,7 @@ class DropboxHandler(AbstractProvider):
)
return

if not self.presets["enabled"]:
if not self.presets.get("enabled"):
self.log.debug("Sync Server: Site {} not enabled for {}.".
format(site_name, project_name))
return

@@ -165,7 +165,7 @@
Returns:
(boolean)
"""
return self.presets["enabled"] and self.dbx is not None
return self.presets.get("enabled") and self.dbx is not None

@classmethod
def get_configurable_items(cls):

@@ -74,7 +74,7 @@ class GDriveHandler(AbstractProvider):
)
return

if not self.presets["enabled"]:
if not self.presets.get("enabled"):
self.log.debug(
"Sync Server: Site {} not enabled for {}.".format(
site_name, project_name

@@ -119,7 +119,7 @@
Returns:
(boolean)
"""
return self.presets["enabled"] and self.service is not None
return self.presets.get("enabled") and self.service is not None

@classmethod
def get_system_settings_schema(cls):

@@ -5,6 +5,7 @@ import threading
import time

from openpype.lib import Logger
from openpype.lib.local_settings import get_local_site_id
from openpype.pipeline import Anatomy
from .abstract_provider import AbstractProvider

@@ -220,6 +221,6 @@ class LocalDriveHandler(AbstractProvider):

def _normalize_site_name(self, site_name):
"""Transform user id to 'local' for Local settings"""
if site_name != 'studio':
if site_name == get_local_site_id():
return 'local'
return site_name

@@ -72,7 +72,7 @@ class SFTPHandler(AbstractProvider):
Returns:
(boolean)
"""
return self.presets["enabled"] and self.conn is not None
return self.presets.get("enabled") and self.conn is not None

@classmethod
def get_system_settings_schema(cls):
@@ -169,7 +169,7 @@ def resolve_paths(module, file_path, project_name,
|
|||
return local_file_path, remote_file_path
|
||||
|
||||
|
||||
def site_is_working(module, project_name, site_name):
|
||||
def _site_is_working(module, project_name, site_name, site_config):
|
||||
"""
|
||||
Confirm that 'site_name' is configured correctly for 'project_name'.
|
||||
|
||||
|
|
@@ -179,54 +179,17 @@ def site_is_working(module, project_name, site_name):
|
|||
module (SyncServerModule)
|
||||
project_name(string):
|
||||
site_name(string):
|
||||
site_config (dict): configuration for site from Settings
|
||||
Returns
|
||||
(bool)
|
||||
"""
|
||||
if _get_configured_sites(module, project_name).get(site_name):
|
||||
return True
|
||||
return False
|
||||
provider = module.get_provider_for_site(site=site_name)
|
||||
handler = lib.factory.get_provider(provider,
|
||||
project_name,
|
||||
site_name,
|
||||
presets=site_config)
|
||||
|
||||
|
||||
def _get_configured_sites(module, project_name):
|
||||
"""
|
||||
Loops through settings and looks for configured sites and checks
|
||||
its handlers for particular 'project_name'.
|
||||
|
||||
Args:
|
||||
project_setting(dict): dictionary from Settings
|
||||
only_project_name(string, optional): only interested in
|
||||
particular project
|
||||
Returns:
|
||||
(dict of dict)
|
||||
{'ProjectA': {'studio':True, 'gdrive':False}}
|
||||
"""
|
||||
settings = module.get_sync_project_setting(project_name)
|
||||
return _get_configured_sites_from_setting(module, project_name, settings)
|
||||
|
||||
|
||||
def _get_configured_sites_from_setting(module, project_name, project_setting):
|
||||
if not project_setting.get("enabled"):
|
||||
return {}
|
||||
|
||||
initiated_handlers = {}
|
||||
configured_sites = {}
|
||||
all_sites = module._get_default_site_configs()
|
||||
all_sites.update(project_setting.get("sites"))
|
||||
for site_name, config in all_sites.items():
|
||||
provider = module.get_provider_for_site(site=site_name)
|
||||
handler = initiated_handlers.get((provider, site_name))
|
||||
if not handler:
|
||||
handler = lib.factory.get_provider(provider,
|
||||
project_name,
|
||||
site_name,
|
||||
presets=config)
|
||||
initiated_handlers[(provider, site_name)] = \
|
||||
handler
|
||||
|
||||
if handler.is_active():
|
||||
configured_sites[site_name] = True
|
||||
|
||||
return configured_sites
|
||||
return handler.is_active()
|
||||
|
||||
|
||||
class SyncServerThread(threading.Thread):
|
||||
|
|
@@ -288,7 +251,8 @@ class SyncServerThread(threading.Thread):
|
|||
for project_name in enabled_projects:
|
||||
preset = self.module.sync_project_settings[project_name]
|
||||
|
||||
local_site, remote_site = self._working_sites(project_name)
|
||||
local_site, remote_site = self._working_sites(project_name,
|
||||
preset)
|
||||
if not all([local_site, remote_site]):
|
||||
continue
|
||||
|
||||
|
|
@@ -464,7 +428,7 @@ class SyncServerThread(threading.Thread):
|
|||
self.timer.cancel()
|
||||
self.timer = None
|
||||
|
||||
def _working_sites(self, project_name):
|
||||
def _working_sites(self, project_name, sync_config):
|
||||
if self.module.is_project_paused(project_name):
|
||||
self.log.debug("Both sites same, skipping")
|
||||
return None, None
|
||||
|
|
@@ -476,9 +440,12 @@ class SyncServerThread(threading.Thread):
|
|||
local_site, remote_site))
|
||||
return None, None
|
||||
|
||||
configured_sites = _get_configured_sites(self.module, project_name)
|
||||
if not all([local_site in configured_sites,
|
||||
remote_site in configured_sites]):
|
||||
local_site_config = sync_config.get('sites')[local_site]
|
||||
remote_site_config = sync_config.get('sites')[remote_site]
|
||||
if not all([_site_is_working(self.module, project_name, local_site,
|
||||
local_site_config),
|
||||
_site_is_working(self.module, project_name, remote_site,
|
||||
remote_site_config)]):
|
||||
self.log.debug(
|
||||
"Some of the sites {} - {} is not working properly".format(
|
||||
local_site, remote_site
|
||||
|
|
|
|||
|
|
@@ -1368,13 +1368,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
"""
|
||||
sync_sett = self.sync_system_settings
|
||||
project_enabled = True
|
||||
project_settings = None
|
||||
if project_name:
|
||||
project_enabled = project_name in self.get_enabled_projects()
|
||||
project_settings = self.get_sync_project_setting(project_name)
|
||||
sync_enabled = sync_sett["enabled"] and project_enabled
|
||||
|
||||
system_sites = {}
|
||||
if sync_enabled:
|
||||
for site, detail in sync_sett.get("sites", {}).items():
|
||||
if project_settings:
|
||||
site_settings = project_settings["sites"].get(site)
|
||||
if site_settings:
|
||||
detail.update(site_settings)
|
||||
system_sites[site] = detail
|
||||
|
||||
system_sites.update(self._get_default_site_configs(sync_enabled,
|
||||
|
|
@@ -1396,14 +1402,22 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
exclude_locals=True)
|
||||
roots = {}
|
||||
for root, config in anatomy_sett["roots"].items():
|
||||
roots[root] = config[platform.system().lower()]
|
||||
roots[root] = config
|
||||
studio_config = {
|
||||
'enabled': True,
|
||||
'provider': 'local_drive',
|
||||
"root": roots
|
||||
}
|
||||
all_sites = {self.DEFAULT_SITE: studio_config}
|
||||
if sync_enabled:
|
||||
all_sites[get_local_site_id()] = {'provider': 'local_drive'}
|
||||
all_sites[get_local_site_id()] = {'enabled': True,
|
||||
'provider': 'local_drive',
|
||||
"root": roots}
|
||||
# duplicate values for normalized local name
|
||||
all_sites["local"] = {
|
||||
'enabled': True,
|
||||
'provider': 'local_drive',
|
||||
"root": roots}
|
||||
return all_sites
|
||||
|
||||
def get_provider_for_site(self, project_name=None, site=None):
|
||||
|
|
|
|||
|
|
@@ -24,6 +24,7 @@ from openpype.lib.path_templates import (
|
|||
FormatObject,
|
||||
)
|
||||
from openpype.lib.log import Logger
|
||||
from openpype.lib import get_local_site_id
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
|
@@ -60,6 +61,11 @@ class BaseAnatomy(object):
|
|||
project_name = project_doc["name"]
|
||||
self.project_name = project_name
|
||||
|
||||
if (site_name and
|
||||
site_name not in ["studio", "local", get_local_site_id()]):
|
||||
raise RuntimeError("Anatomy could be created only for default "
|
||||
"local sites not for {}".format(site_name))
|
||||
|
||||
self._site_name = site_name
|
||||
|
||||
self._data = self._prepare_anatomy_data(
|
||||
|
|
|
|||
|
|
@@ -608,7 +608,7 @@ def discover_legacy_creator_plugins():
|
|||
plugin.apply_settings(project_settings, system_settings)
|
||||
except Exception:
|
||||
log.warning(
|
||||
"Failed to apply settings to loader {}".format(
|
||||
"Failed to apply settings to creator {}".format(
|
||||
plugin.__name__
|
||||
),
|
||||
exc_info=True
|
||||
|
|
|
|||
|
|
@@ -42,7 +42,9 @@ from openpype.pipeline.load import (
|
|||
get_contexts_for_repre_docs,
|
||||
load_with_repre_context,
|
||||
)
|
||||
from openpype.pipeline.create import get_legacy_creator_by_name
|
||||
from openpype.pipeline.create import (
|
||||
discover_legacy_creator_plugins
|
||||
)
|
||||
|
||||
|
||||
class TemplateNotFound(Exception):
|
||||
|
|
@@ -235,7 +237,14 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
def get_creators_by_name(self):
|
||||
if self._creators_by_name is None:
|
||||
self._creators_by_name = get_legacy_creator_by_name()
|
||||
self._creators_by_name = {}
|
||||
for creator in discover_legacy_creator_plugins():
|
||||
creator_name = creator.__name__
|
||||
if creator_name in self._creators_by_name:
|
||||
raise KeyError(
|
||||
"Duplicated creator name {} !".format(creator_name)
|
||||
)
|
||||
self._creators_by_name[creator_name] = creator
|
||||
return self._creators_by_name
|
||||
|
||||
def get_shared_data(self, key):
|
||||
|
|
@@ -401,7 +410,12 @@ class AbstractTemplateBuilder(object):
|
|||
key=lambda i: i.order
|
||||
))
|
||||
|
||||
def build_template(self, template_path=None, level_limit=None):
|
||||
def build_template(
|
||||
self,
|
||||
template_path=None,
|
||||
level_limit=None,
|
||||
keep_placeholders=None
|
||||
):
|
||||
"""Main callback for building workfile from template path.
|
||||
|
||||
Todo:
|
||||
|
|
@@ -410,16 +424,25 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
Args:
|
||||
template_path (str): Path to a template file with placeholders.
|
||||
Template from settings 'get_template_path' used when not
|
||||
Template from settings 'get_template_preset' used when not
|
||||
passed.
|
||||
level_limit (int): Limit of populate loops. Related to
|
||||
'populate_scene_placeholders' method.
|
||||
keep_placeholders (bool): Add flag to placeholder data for
|
||||
hosts to decide if they want to remove
|
||||
placeholder after it is used.
|
||||
"""
|
||||
template_preset = self.get_template_preset()
|
||||
|
||||
if template_path is None:
|
||||
template_path = self.get_template_path()
|
||||
template_path = template_preset["path"]
|
||||
|
||||
if keep_placeholders is None:
|
||||
keep_placeholders = template_preset["keep_placeholder"]
|
||||
|
||||
self.import_template(template_path)
|
||||
self.populate_scene_placeholders(level_limit)
|
||||
self.populate_scene_placeholders(
|
||||
level_limit, keep_placeholders)
|
||||
|
||||
def rebuild_template(self):
|
||||
"""Go through existing placeholders in scene and update them.
|
||||
|
|
@@ -489,7 +512,9 @@ class AbstractTemplateBuilder(object):
|
|||
plugin = plugins_by_identifier[identifier]
|
||||
plugin.prepare_placeholders(placeholders)
|
||||
|
||||
def populate_scene_placeholders(self, level_limit=None):
|
||||
def populate_scene_placeholders(
|
||||
self, level_limit=None, keep_placeholders=None
|
||||
):
|
||||
"""Find placeholders in scene using plugins and process them.
|
||||
|
||||
This should happen after 'import_template'.
|
||||
|
|
@@ -505,6 +530,9 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
Args:
|
||||
level_limit (int): Level of loops that can happen. Default is 1000.
|
||||
keep_placeholders (bool): Add flag to placeholder data for
|
||||
hosts to decide if they want to remove
|
||||
placeholder after it is used.
|
||||
"""
|
||||
|
||||
if not self.placeholder_plugins:
|
||||
|
|
@@ -541,6 +569,11 @@ class AbstractTemplateBuilder(object):
|
|||
" is already in progress."
|
||||
))
|
||||
continue
|
||||
|
||||
# add flag for keeping placeholders in scene
|
||||
# after they are processed
|
||||
placeholder.data["keep_placeholder"] = keep_placeholders
|
||||
|
||||
filtered_placeholders.append(placeholder)
|
||||
|
||||
self._prepare_placeholders(filtered_placeholders)
|
||||
|
|
@@ -599,8 +632,8 @@ class AbstractTemplateBuilder(object):
|
|||
["profiles"]
|
||||
)
|
||||
|
||||
def get_template_path(self):
|
||||
"""Unified way how template path is received usign settings.
|
||||
def get_template_preset(self):
|
||||
"""Unified way how template preset is received usign settings.
|
||||
|
||||
Method is dependent on '_get_build_profiles' which should return filter
|
||||
profiles to resolve path to a template. Default implementation looks
|
||||
|
|
@@ -637,6 +670,13 @@ class AbstractTemplateBuilder(object):
|
|||
).format(task_name, task_type, host_name))
|
||||
|
||||
path = profile["path"]
|
||||
|
||||
# switch to remove placeholders after they are used
|
||||
keep_placeholder = profile.get("keep_placeholder")
|
||||
# backward compatibility, since default is True
|
||||
if keep_placeholder is None:
|
||||
keep_placeholder = True
|
||||
|
||||
if not path:
|
||||
raise TemplateLoadFailed((
|
||||
"Template path is not set.\n"
|
||||
|
|
@@ -650,14 +690,24 @@ class AbstractTemplateBuilder(object):
|
|||
key: value
|
||||
for key, value in os.environ.items()
|
||||
}
|
||||
|
||||
fill_data["root"] = anatomy.roots
|
||||
fill_data["project"] = {
|
||||
"name": project_name,
|
||||
"code": anatomy["attributes"]["code"]
|
||||
}
|
||||
|
||||
|
||||
result = StringTemplate.format_template(path, fill_data)
|
||||
if result.solved:
|
||||
path = result.normalized()
|
||||
|
||||
if path and os.path.exists(path):
|
||||
self.log.info("Found template at: '{}'".format(path))
|
||||
return path
|
||||
return {
|
||||
"path": path,
|
||||
"keep_placeholder": keep_placeholder
|
||||
}
|
||||
|
||||
solved_path = None
|
||||
while True:
|
||||
|
|
@@ -683,7 +733,10 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
self.log.info("Found template at: '{}'".format(solved_path))
|
||||
|
||||
return solved_path
|
||||
return {
|
||||
"path": solved_path,
|
||||
"keep_placeholder": keep_placeholder
|
||||
}
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
|
|
@@ -1002,7 +1055,13 @@ class PlaceholderItem(object):
|
|||
return self._log
|
||||
|
||||
def __repr__(self):
|
||||
return "< {} {} >".format(self.__class__.__name__, self.name)
|
||||
name = None
if hasattr(self, "name"):
name = self.name
if hasattr(self, "_scene_identifier"):
name = self._scene_identifier

return "< {} {} >".format(self.__class__.__name__, name)
|
||||
|
||||
@property
|
||||
def order(self):
|
||||
|
|
@@ -1426,6 +1485,173 @@ class PlaceholderLoadMixin(object):
|
|||
pass
|
||||
|
||||
|
||||
class PlaceholderCreateMixin(object):
|
||||
"""Mixin prepared for creating placeholder plugins.
|
||||
|
||||
Implementation prepares options for placeholders with
|
||||
'get_create_plugin_options'.
|
||||
|
||||
For placeholder population is implemented 'populate_create_placeholder'.
|
||||
|
||||
PlaceholderItem can have implemented methods:
|
||||
- 'create_failed' - called when creating of an instance failed
|
||||
- 'create_succeed' - called when creating of an instance succeeded
|
||||
"""
|
||||
|
||||
def get_create_plugin_options(self, options=None):
|
||||
"""Unified attribute definitions for create placeholder.
|
||||
|
||||
Common function for placeholder plugins used for creating of
|
||||
publishable instances. Use it with 'get_placeholder_options'.
|
||||
|
||||
Args:
|
||||
plugin (PlaceholderPlugin): Plugin used for creating of
|
||||
publish instances.
|
||||
options (Dict[str, Any]): Already available options which are used
|
||||
as defaults for attributes.
|
||||
|
||||
Returns:
|
||||
List[AbtractAttrDef]: Attribute definitions common for create
|
||||
plugins.
|
||||
"""
|
||||
|
||||
creators_by_name = self.builder.get_creators_by_name()
|
||||
|
||||
creator_items = [
|
||||
(creator_name, creator.label or creator_name)
|
||||
for creator_name, creator in creators_by_name.items()
|
||||
]
|
||||
|
||||
creator_items.sort(key=lambda i: i[1])
|
||||
options = options or {}
|
||||
return [
|
||||
attribute_definitions.UISeparatorDef(),
|
||||
attribute_definitions.UILabelDef("Main attributes"),
|
||||
attribute_definitions.UISeparatorDef(),
|
||||
|
||||
attribute_definitions.EnumDef(
|
||||
"creator",
|
||||
label="Creator",
|
||||
default=options.get("creator"),
|
||||
items=creator_items,
|
||||
tooltip=(
|
||||
"Creator"
|
||||
"\nDefines what OpenPype creator will be used to"
|
||||
" create publishable instance."
|
||||
"\nUseable creator depends on current host's creator list."
|
||||
"\nField is case sensitive."
|
||||
)
|
||||
),
|
||||
attribute_definitions.TextDef(
|
||||
"create_variant",
|
||||
label="Variant",
|
||||
default=options.get("create_variant"),
|
||||
placeholder='Main',
|
||||
tooltip=(
|
||||
"Creator"
|
||||
"\nDefines variant name which will be use for "
|
||||
"\ncompiling of subset name."
|
||||
)
|
||||
),
|
||||
attribute_definitions.UISeparatorDef(),
|
||||
attribute_definitions.NumberDef(
|
||||
"order",
|
||||
label="Order",
|
||||
default=options.get("order") or 0,
|
||||
decimals=0,
|
||||
minimum=0,
|
||||
maximum=999,
|
||||
tooltip=(
|
||||
"Order"
|
||||
"\nOrder defines creating instance priority (0 to 999)"
|
||||
"\nPriority rule is : \"lowest is first to load\"."
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
def populate_create_placeholder(self, placeholder):
|
||||
"""Create placeholder is going to create matching publishabe instance.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Placeholder item with information
|
||||
about requested publishable instance.
|
||||
"""
|
||||
creator_name = placeholder.data["creator"]
|
||||
create_variant = placeholder.data["create_variant"]
|
||||
|
||||
creator_plugin = self.builder.get_creators_by_name()[creator_name]
|
||||
|
||||
# create subset name
|
||||
project_name = legacy_io.Session["AVALON_PROJECT"]
|
||||
task_name = legacy_io.Session["AVALON_TASK"]
|
||||
asset_name = legacy_io.Session["AVALON_ASSET"]
|
||||
|
||||
# get asset id
|
||||
asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
|
||||
assert asset_doc, "No current asset found in Session"
|
||||
asset_id = asset_doc['_id']
|
||||
|
||||
subset_name = creator_plugin.get_subset_name(
|
||||
create_variant,
|
||||
task_name,
|
||||
asset_id,
|
||||
project_name
|
||||
)
|
||||
|
||||
creator_data = {
|
||||
"creator_name": creator_name,
|
||||
"create_variant": create_variant,
|
||||
"subset_name": subset_name,
|
||||
"creator_plugin": creator_plugin
|
||||
}
|
||||
|
||||
self._before_instance_create(placeholder)
|
||||
|
||||
# compile subset name from variant
|
||||
try:
|
||||
creator_instance = creator_plugin(
|
||||
subset_name,
|
||||
asset_name
|
||||
).process()
|
||||
|
||||
except Exception:
|
||||
failed = True
|
||||
self.create_failed(placeholder, creator_data)
|
||||
|
||||
else:
|
||||
failed = False
|
||||
self.create_succeed(placeholder, creator_instance)
|
||||
|
||||
self.cleanup_placeholder(placeholder, failed)
|
||||
|
||||
def create_failed(self, placeholder, creator_data):
|
||||
if hasattr(placeholder, "create_failed"):
|
||||
placeholder.create_failed(creator_data)
|
||||
|
||||
def create_succeed(self, placeholder, creator_instance):
|
||||
if hasattr(placeholder, "create_succeed"):
|
||||
placeholder.create_succeed(creator_instance)
|
||||
|
||||
def cleanup_placeholder(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of single representation.
|
||||
|
||||
Can be called multiple times during placeholder item populating and is
|
||||
called even if loading failed.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
failed (bool): Loading of representation failed.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def _before_instance_create(self, placeholder):
|
||||
"""Can be overriden. Is called before instance is created."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class LoadPlaceholderItem(PlaceholderItem):
|
||||
"""PlaceholderItem for plugin which is loading representations.
|
||||
|
||||
|
|
@@ -1449,3 +1675,28 @@ class LoadPlaceholderItem(PlaceholderItem):
|
|||
|
||||
def load_failed(self, representation):
|
||||
self._failed_representations.append(representation)
|
||||
|
||||
|
||||
class CreatePlaceholderItem(PlaceholderItem):
|
||||
"""PlaceholderItem for plugin which is creating publish instance.
|
||||
|
||||
Connected to 'PlaceholderCreateMixin'.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreatePlaceholderItem, self).__init__(*args, **kwargs)
|
||||
self._failed_created_publish_instances = []
|
||||
|
||||
def get_errors(self):
|
||||
if not self._failed_created_publish_instances:
|
||||
return []
|
||||
message = (
|
||||
"Failed to create {} instance using Creator {}"
|
||||
).format(
|
||||
len(self._failed_created_publish_instances),
|
||||
self.data["creator"]
|
||||
)
|
||||
return [message]
|
||||
|
||||
def create_failed(self, creator_data):
|
||||
self._failed_created_publish_instances.append(creator_data)
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@
"deadline_servers": [],
"publish": {
"CollectDefaultDeadlineServer": {
"pass_mongo_url": false
"pass_mongo_url": true
},
"CollectDeadlinePools": {
"primary_pool": "",
@@ -25,8 +25,15 @@
{
"key": "path",
"label": "Path to template",
"type": "text",
"object_type": "text"
"type": "path",
"multiplatform": false,
"multipath": false
},
{
"key": "keep_placeholder",
"label": "Keep placeholders",
"type": "boolean",
"default": true
}
]
}
@@ -248,6 +248,9 @@ class SitesWidget(QtWidgets.QWidget):
|
|||
main_layout.addWidget(comboboxes_widget, 0)
|
||||
main_layout.addWidget(content_widget, 1)
|
||||
|
||||
active_site_widget.value_changed.connect(self.refresh)
|
||||
remote_site_widget.value_changed.connect(self.refresh)
|
||||
|
||||
self.active_site_widget = active_site_widget
|
||||
self.remote_site_widget = remote_site_widget
|
||||
|
||||
|
|
@@ -268,25 +271,29 @@ class SitesWidget(QtWidgets.QWidget):
|
|||
self.modules_manager.modules_by_name["sync_server"]
|
||||
)
|
||||
|
||||
# This is temporary modification
|
||||
# - whole logic here should be in sync module's providers
|
||||
site_names = sync_server_module.get_active_sites_from_settings(
|
||||
self.project_settings["project_settings"].value
|
||||
)
|
||||
site_configs = sync_server_module.get_all_site_configs(
|
||||
self._project_name)
|
||||
|
||||
roots_entity = (
|
||||
self.project_settings[PROJECT_ANATOMY_KEY][LOCAL_ROOTS_KEY]
|
||||
)
|
||||
|
||||
site_names = [self.active_site_widget.current_text(),
|
||||
self.remote_site_widget.current_text()]
|
||||
output = []
|
||||
for site_name in site_names:
|
||||
if not site_name:
|
||||
continue
|
||||
|
||||
site_inputs = []
|
||||
for root_name, path_entity in roots_entity.items():
|
||||
platform_entity = path_entity[platform.system().lower()]
|
||||
site_config = site_configs[site_name]
|
||||
for root_name, path_entity in site_config.get("root", {}).items():
|
||||
if not path_entity:
|
||||
continue
|
||||
platform_value = path_entity[platform.system().lower()]
|
||||
site_inputs.append({
|
||||
"label": root_name,
|
||||
"key": root_name,
|
||||
"value": platform_entity.value
|
||||
"value": platform_value
|
||||
})
|
||||
|
||||
output.append(
|
||||
|
|
@@ -436,6 +443,7 @@ class SitesWidget(QtWidgets.QWidget):
|
|||
|
||||
class _SiteCombobox(QtWidgets.QWidget):
|
||||
input_label = None
|
||||
value_changed = QtCore.Signal()
|
||||
|
||||
def __init__(self, modules_manager, project_settings, parent):
|
||||
super(_SiteCombobox, self).__init__(parent)
|
||||
|
|
@ -661,6 +669,7 @@ class _SiteCombobox(QtWidgets.QWidget):

        self._set_local_settings_value(self.current_text())
        self._update_style()
        self.value_changed.emit()

    def _set_local_settings_value(self, value):
        raise NotImplementedError(
@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.14.10-nightly.2"
__version__ = "3.14.10-nightly.7"
@ -105,16 +105,19 @@ class ScrollMessageBox(QtWidgets.QDialog):
        content_widget = QtWidgets.QWidget(self)
        scroll_widget.setWidget(content_widget)

        max_len = 0
        message_len = 0
        content_layout = QtWidgets.QVBoxLayout(content_widget)
        for message in messages:
            label_widget = QtWidgets.QLabel(message, content_widget)
            content_layout.addWidget(label_widget)
            max_len = max(max_len, len(message))
            message_len = max(message_len, len(message))

        # guess size of scrollable area
        max_width = QtWidgets.QApplication.desktop().availableGeometry().width
        scroll_widget.setMinimumWidth(min(max_width, max_len * 6))
        desktop = QtWidgets.QApplication.desktop()
        max_width = desktop.availableGeometry().width()
        scroll_widget.setMinimumWidth(
            min(max_width, message_len * 6)
        )
        layout.addWidget(scroll_widget)

        if not cancelable:  # if no specific buttons OK only
@ -94,6 +94,16 @@ Few keys also have Capitalized and UPPERCASE format. Values will be modified acc
Here you can find review {review_filepath}
```

##### Dynamic message for artists
If artists use a host with the Publisher implemented (the new publishing UI, available in Tray Publisher, Adobe products etc.), they can add
an additional message (for example a notification for specific users; the artist must provide a proper user id prefixed with '@').
The additional message is sent only if at least one profile, i.e. one target channel, is configured.
All available template keys (see above) can be used here as placeholders too.

#### User or group notifications
A message template or dynamic data may contain a user or group notification. It must be in the format @artist.name, '@John Doe', or "@admin group" for a display name containing a space.
If a value prefixed with @ cannot be resolved to a Slack user, the message will contain the same value as plain text (not translated by Slack into a link and proper mention).

#### Message retention
Currently no purging of old messages is implemented in OpenPype. Slack admins should set their own retention policy for messages and files per channel
(see https://slack.com/help/articles/203457187-Customize-message-and-file-retention-policies).
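As a rough illustration of the template behaviour described above, the snippet below fills a message template containing template keys and an '@' mention with Python's `str.format`; the keys, values and mention are examples only, not the plugin's exact fill data:

```python
# Example only: fill data and mention are made up. An unresolved "@" mention
# is delivered as plain text, as described above.
template = (
    "{Subset} for {ASSET} is ready for review.\n"
    "Here you can find review {review_filepath}\n"
    "@john.doe please have a look."
)

fill_data = {
    "Subset": "renderMain",
    "ASSET": "sh010",
    "review_filepath": "/path/to/review_h264.mp4",
}

print(template.format(**fill_data))
```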
@ -4740,9 +4740,9 @@ json-schema-traverse@^1.0.0:
  integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==

json5@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe"
  integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593"
  integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==
  dependencies:
    minimist "^1.2.0"
@ -5154,16 +5154,11 @@ minimatch@^3.0.4:
  dependencies:
    brace-expansion "^1.1.7"

minimist@^1.2.0:
minimist@^1.2.0, minimist@^1.2.5:
  version "1.2.7"
  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18"
  integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==

minimist@^1.2.5:
  version "1.2.6"
  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
  integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==

mkdirp@^0.5.5:
  version "0.5.5"
  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"