move substance painter integration to server-addon and rename the folder as ayon_substancepainter

Kayla Man 2024-05-15 16:06:10 +08:00
parent 082a0b0f15
commit 700967a28b
36 changed files with 2374 additions and 0 deletions


@@ -0,0 +1,425 @@
# -*- coding: utf-8 -*-
"""Pipeline tools for OpenPype Substance Painter integration."""
import os
import logging
from functools import partial
# Substance 3D Painter modules
import substance_painter.ui
import substance_painter.event
import substance_painter.project
import substance_painter.resource
import pyblish.api
from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from ayon_core.settings import get_current_project_settings
from ayon_core.pipeline.template_data import get_template_data_with_names
from ayon_core.pipeline import (
register_creator_plugin_path,
register_loader_plugin_path,
AVALON_CONTAINER_ID,
Anatomy,
)
from ayon_core.lib import (
StringTemplate,
register_event_callback,
emit_event,
)
from ayon_core.pipeline.load import any_outdated_containers
from ayon_substancepainter import SUBSTANCE_HOST_DIR
from . import lib
log = logging.getLogger("ayon_core.hosts.substance")
PLUGINS_DIR = os.path.join(SUBSTANCE_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
OPENPYPE_METADATA_KEY = "OpenPype"
OPENPYPE_METADATA_CONTAINERS_KEY = "containers" # child key
OPENPYPE_METADATA_CONTEXT_KEY = "context" # child key
OPENPYPE_METADATA_INSTANCES_KEY = "instances" # child key
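# The project metadata stored under the "OpenPype" key is expected to roughly
# follow this layout (illustrative sketch, values are hypothetical):
#
#   {
#       "context": {...},                        # publish context data
#       "containers": {"<object name>": {...}},  # loaded containers by name
#       "instances": {"<instance id>": {...}}    # publish instances by id
#   }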
class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
name = "substancepainter"
def __init__(self):
super(SubstanceHost, self).__init__()
self._has_been_setup = False
self.menu = None
self.callbacks = []
self.shelves = []
def install(self):
pyblish.api.register_host("substancepainter")
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
log.info("Installing callbacks ... ")
# register_event_callback("init", on_init)
self._register_callbacks()
# register_event_callback("before.save", before_save)
# register_event_callback("save", on_save)
register_event_callback("open", on_open)
# register_event_callback("new", on_new)
log.info("Installing menu ... ")
self._install_menu()
project_settings = get_current_project_settings()
self._install_shelves(project_settings)
self._has_been_setup = True
def uninstall(self):
self._uninstall_shelves()
self._uninstall_menu()
self._deregister_callbacks()
def workfile_has_unsaved_changes(self):
if not substance_painter.project.is_open():
return False
return substance_painter.project.needs_saving()
def get_workfile_extensions(self):
return [".spp", ".toc"]
def save_workfile(self, dst_path=None):
if not substance_painter.project.is_open():
return False
if not dst_path:
dst_path = self.get_current_workfile()
full_save_mode = substance_painter.project.ProjectSaveMode.Full
substance_painter.project.save_as(dst_path, full_save_mode)
return dst_path
def open_workfile(self, filepath):
if not os.path.exists(filepath):
raise RuntimeError("File does not exist: {}".format(filepath))
# We must first explicitly close current project before opening another
if substance_painter.project.is_open():
substance_painter.project.close()
substance_painter.project.open(filepath)
return filepath
def get_current_workfile(self):
if not substance_painter.project.is_open():
return None
filepath = substance_painter.project.file_path()
if filepath and filepath.endswith(".spt"):
# When currently in a Substance Painter template assume our
# scene isn't saved. This can be the case directly after doing
# "New project", the path will then be the template used. This
# avoids Workfiles tool trying to save as .spt extension if the
# file hasn't been saved before.
return
return filepath
def get_containers(self):
if not substance_painter.project.is_open():
return
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY)
if containers:
for key, container in containers.items():
container["objectName"] = key
yield container
def update_context_data(self, data, changes):
if not substance_painter.project.is_open():
return
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
metadata.set(OPENPYPE_METADATA_CONTEXT_KEY, data)
def get_context_data(self):
if not substance_painter.project.is_open():
return
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
return metadata.get(OPENPYPE_METADATA_CONTEXT_KEY) or {}
def _install_menu(self):
from PySide2 import QtWidgets
from ayon_core.tools.utils import host_tools
parent = substance_painter.ui.get_main_window()
tab_menu_label = os.environ.get("AYON_MENU_LABEL") or "AYON"
menu = QtWidgets.QMenu(tab_menu_label)
action = menu.addAction("Create...")
action.triggered.connect(
lambda: host_tools.show_publisher(parent=parent,
tab="create")
)
action = menu.addAction("Load...")
action.triggered.connect(
lambda: host_tools.show_loader(parent=parent, use_context=True)
)
action = menu.addAction("Publish...")
action.triggered.connect(
lambda: host_tools.show_publisher(parent=parent,
tab="publish")
)
action = menu.addAction("Manage...")
action.triggered.connect(
lambda: host_tools.show_scene_inventory(parent=parent)
)
action = menu.addAction("Library...")
action.triggered.connect(
lambda: host_tools.show_library_loader(parent=parent)
)
menu.addSeparator()
action = menu.addAction("Work Files...")
action.triggered.connect(
lambda: host_tools.show_workfiles(parent=parent)
)
substance_painter.ui.add_menu(menu)
def on_menu_destroyed():
self.menu = None
menu.destroyed.connect(on_menu_destroyed)
self.menu = menu
def _uninstall_menu(self):
if self.menu:
self.menu.destroy()
self.menu = None
def _register_callbacks(self):
# Prepare emit event callbacks
open_callback = partial(emit_event, "open")
# Connect to the Substance Painter events
dispatcher = substance_painter.event.DISPATCHER
for event, callback in [
(substance_painter.event.ProjectOpened, open_callback)
]:
dispatcher.connect(event, callback)
# Keep a reference so we can deregister if needed
self.callbacks.append((event, callback))
def _deregister_callbacks(self):
for event, callback in self.callbacks:
substance_painter.event.DISPATCHER.disconnect(event, callback)
self.callbacks.clear()
def _install_shelves(self, project_settings):
shelves = project_settings["substancepainter"].get("shelves", [])
if not shelves:
return
# Prepare formatting data if we detect any path which might have
# template tokens like {folder[name]} in there.
formatting_data = {}
has_formatting_entries = any("{" in item["value"] for item in shelves)
if has_formatting_entries:
project_name = self.get_current_project_name()
folder_path = self.get_current_folder_path()
task_name = self.get_current_task_name()
formatting_data = get_template_data_with_names(
project_name, folder_path, task_name, project_settings
)
anatomy = Anatomy(project_name)
formatting_data["root"] = anatomy.roots
for shelve_item in shelves:
# Allow formatting with anatomy for the paths
path = shelve_item["value"]
if "{" in path:
path = StringTemplate.format_template(path, formatting_data)
name = shelve_item["name"]
shelf_name = None
try:
shelf_name = lib.load_shelf(path, name=name)
except ValueError as exc:
print(f"Failed to load shelf -> {exc}")
if shelf_name:
self.shelves.append(shelf_name)
def _uninstall_shelves(self):
for shelf_name in self.shelves:
substance_painter.resource.Shelves.remove(shelf_name)
self.shelves.clear()
def on_open():
log.info("Running callback on open..")
if any_outdated_containers():
from ayon_core.tools.utils import SimplePopup
log.warning("Scene has outdated content.")
# Get main window
parent = substance_painter.ui.get_main_window()
if parent is None:
log.info("Skipping outdated content pop-up "
"because Substance window can't be found.")
else:
# Show outdated pop-up
def _on_show_inventory():
from ayon_core.tools.utils import host_tools
host_tools.show_scene_inventory(parent=parent)
dialog = SimplePopup(parent=parent)
dialog.setWindowTitle("Substance scene has outdated content")
dialog.set_message("There are outdated containers in "
"your Substance scene.")
dialog.on_clicked.connect(_on_show_inventory)
dialog.show()
def imprint_container(container,
name,
namespace,
context,
loader):
"""Imprint a loaded container with metadata.
Containerisation enables tracking of version, author and origin
for loaded assets.
Arguments:
container (dict): The (substance metadata) dictionary to imprint into.
name (str): Name of resulting assembly
namespace (str): Namespace under which to host container
context (dict): Asset information
loader (load.LoaderPlugin): loader instance used to produce container.
Returns:
None
"""
data = [
("schema", "openpype:container-2.0"),
("id", AVALON_CONTAINER_ID),
("name", str(name)),
("namespace", str(namespace) if namespace else None),
("loader", str(loader.__class__.__name__)),
("representation", context["representation"]["id"]),
]
for key, value in data:
container[key] = value
def set_container_metadata(object_name, container_data, update=False):
"""Helper method to directly set the data for a specific container
Args:
object_name (str): The unique object name identifier for the container
container_data (dict): The data for the container.
Note: 'objectName' is derived from `object_name`, so any such key in
`container_data` will be ignored.
update (bool): Whether to only update the dict data.
"""
# The objectName is derived from the key in the metadata, so it won't be
# stored in the container's data itself.
container_data.pop("objectName", None)
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY) or {}
if update:
existing_data = containers.setdefault(object_name, {})
existing_data.update(container_data) # mutable dict, in-place update
else:
containers[object_name] = container_data
metadata.set("containers", containers)
def remove_container_metadata(object_name):
"""Helper method to remove the data for a specific container"""
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY)
if containers:
containers.pop(object_name, None)
metadata.set("containers", containers)
def set_instance(instance_id, instance_data, update=False):
"""Helper method to directly set the data for a specific container
Args:
instance_id (str): Unique identifier for the instance
instance_data (dict): The instance data to store in the metadata.
"""
set_instances({instance_id: instance_data}, update=update)
def set_instances(instance_data_by_id, update=False):
"""Store data for multiple instances at the same time.
This is more efficient than querying and setting them in the metadata one
by one.
"""
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
instances = metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {}
for instance_id, instance_data in instance_data_by_id.items():
if update:
existing_data = instances.setdefault(instance_id, {})
existing_data.update(instance_data)  # mutable dict, in-place update
else:
instances[instance_id] = instance_data
metadata.set("instances", instances)
def remove_instance(instance_id):
"""Helper method to remove the data for a specific container"""
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
instances = metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {}
instances.pop(instance_id, None)
metadata.set("instances", instances)
def get_instances_by_id():
"""Return all instances stored in the project instances metadata"""
if not substance_painter.project.is_open():
return {}
metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
return metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {}
def get_instances():
"""Return all instances stored in the project instances as a list"""
return list(get_instances_by_id().values())
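# Illustrative usage of the instance metadata helpers above, assuming an open
# project (the id and data values are hypothetical):
#
#   >>> set_instance("my-id", {"productType": "textureSet", "variant": "Main"})
#   >>> get_instances_by_id()["my-id"]["variant"]
#   'Main'
#   >>> set_instance("my-id", {"active": False}, update=True)  # merge keys
#   >>> remove_instance("my-id")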


@@ -0,0 +1,10 @@
from .addon import (
SubstanceAddon,
SUBSTANCE_HOST_DIR,
)
__all__ = (
"SubstanceAddon",
"SUBSTANCE_HOST_DIR"
)


@@ -0,0 +1,31 @@
import os
from ayon_core.addon import AYONAddon, IHostAddon
SUBSTANCE_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
class SubstanceAddon(AYONAddon, IHostAddon):
name = "substancepainter"
host_name = "substancepainter"
def add_implementation_envs(self, env, _app):
# Add requirements to SUBSTANCE_PAINTER_PLUGINS_PATH
plugin_path = os.path.join(SUBSTANCE_HOST_DIR, "deploy")
plugin_path = plugin_path.replace("\\", "/")
if env.get("SUBSTANCE_PAINTER_PLUGINS_PATH"):
plugin_path += os.pathsep + env["SUBSTANCE_PAINTER_PLUGINS_PATH"]
env["SUBSTANCE_PAINTER_PLUGINS_PATH"] = plugin_path
# Logging in Substance Painter doesn't support custom terminal colors
env["AYON_LOG_NO_COLORS"] = "1"
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:
return []
return [
os.path.join(SUBSTANCE_HOST_DIR, "hooks")
]
def get_workfile_extensions(self):
return [".spp", ".toc"]


@@ -0,0 +1,8 @@
from .pipeline import (
SubstanceHost,
)
__all__ = [
"SubstanceHost",
]


@@ -0,0 +1,157 @@
"""Substance Painter OCIO management
Adobe Substance 3D Painter supports OCIO color management using a per-project
configuration. Output color spaces are defined at the project level.
For more information see:
- https://substance3d.adobe.com/documentation/spdoc/color-management-223053233.html # noqa
- https://substance3d.adobe.com/documentation/spdoc/color-management-with-opencolorio-225969419.html # noqa
"""
import substance_painter.export
import substance_painter.js
import json
from .lib import (
get_document_structure,
get_channel_format
)
def _iter_document_stack_channels():
"""Yield all stack paths and channels project"""
for material in get_document_structure()["materials"]:
material_name = material["name"]
for stack in material["stacks"]:
stack_name = stack["name"]
if stack_name:
stack_path = [material_name, stack_name]
else:
stack_path = material_name
for channel in stack["channels"]:
yield stack_path, channel
def _get_first_color_and_data_stack_and_channel():
"""Return first found color channel and data channel."""
color_channel = None
data_channel = None
for stack_path, channel in _iter_document_stack_channels():
channel_format = get_channel_format(stack_path, channel)
if channel_format["color"]:
color_channel = (stack_path, channel)
else:
data_channel = (stack_path, channel)
if color_channel and data_channel:
return color_channel, data_channel
return color_channel, data_channel
def get_project_channel_data():
"""Return colorSpace settings for the current substance painter project.
In Substance Painter only color channels have Color Management enabled
whereas data channels have no color management applied. This can't be
changed. The artist can only customize the export color space for color
channels per bit-depth for 8 bpc, 16 bpc and 32 bpc.
As such this returns the color space for 'data' channels and, per bit-depth,
for color channels.
Example output:
{
"data": {"colorSpace": "Utility - Raw"},
"color8": {"colorSpace": "ACES - ACEScg"},
"color16": {"colorSpace": "ACES - ACEScg"},
"color16f": {"colorSpace": "ACES - ACEScg"},
"color32f": {"colorSpace": "ACES - ACEScg"}
}
"""
keys = ["colorSpace"]
query = {key: f"${key}" for key in keys}
config = {
"exportPath": "/",
"exportShaderParams": False,
"defaultExportPreset": "query_preset",
"exportPresets": [{
"name": "query_preset",
# List of maps making up this export preset.
"maps": [{
"fileName": json.dumps(query),
# List of source/destination defining which channels will
# make up the texture file.
"channels": [],
"parameters": {
"fileFormat": "exr",
"bitDepth": "32f",
"dithering": False,
"sizeLog2": 4,
"paddingAlgorithm": "passthrough",
"dilationDistance": 16
}
}]
}],
}
def _get_query_output(config):
# Return the basename of the single output path we defined
result = substance_painter.export.list_project_textures(config)
path = next(iter(result.values()))[0]
# strip extension and slash since we know relevant json data starts
# and ends with { and } characters
path = path.strip("/\\.exr")
return json.loads(path)
# Query for each type of channel (color and data)
color_channel, data_channel = _get_first_color_and_data_stack_and_channel()
colorspaces = {}
for key, channel_data in {
"data": data_channel,
"color": color_channel
}.items():
if channel_data is None:
# No channel of that datatype anywhere in the Stack. We're
# unable to identify the output color space of the project
colorspaces[key] = None
continue
stack, channel = channel_data
# Stack must be a string
if not isinstance(stack, str):
# Assume iterable
stack = "/".join(stack)
# Define the temp output config
config["exportList"] = [{"rootPath": stack}]
config_map = config["exportPresets"][0]["maps"][0]
config_map["channels"] = [
{
"destChannel": x,
"srcChannel": x,
"srcMapType": "documentMap",
"srcMapName": channel
} for x in "RGB"
]
if key == "color":
# Query for each bit depth
# Color space definition can have a different OCIO config set
# for 8-bit, 16-bit and 32-bit outputs so we need to check each
# bit depth
for depth in ["8", "16", "16f", "32f"]:
config_map["parameters"]["bitDepth"] = depth # noqa
colorspaces[key + depth] = _get_query_output(config)
else:
# Data channel (not color managed)
colorspaces[key] = _get_query_output(config)
return colorspaces
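# Illustrative usage (assuming an open project with at least one color and
# one data channel; the exact color space names depend on the project's OCIO
# configuration):
#
#   >>> channel_data = get_project_channel_data()
#   >>> channel_data["color16"]["colorSpace"]
#   'ACES - ACEScg'
#   >>> channel_data["data"]["colorSpace"]
#   'Utility - Raw'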


@@ -0,0 +1,642 @@
import os
import re
import json
from collections import defaultdict
import substance_painter.project
import substance_painter.resource
import substance_painter.js
import substance_painter.export
import substance_painter.ui
import substance_painter.logging
from qtpy import QtGui, QtWidgets, QtCore
def get_export_presets():
"""Return Export Preset resource URLs for all available Export Presets.
Returns:
dict: {Resource url: GUI Label}
"""
# TODO: Find more optimal way to find all export templates
preset_resources = {}
for shelf in substance_painter.resource.Shelves.all():
shelf_path = os.path.normpath(shelf.path())
presets_path = os.path.join(shelf_path, "export-presets")
if not os.path.exists(presets_path):
continue
for filename in os.listdir(presets_path):
if filename.endswith(".spexp"):
template_name = os.path.splitext(filename)[0]
resource = substance_painter.resource.ResourceID(
context=shelf.name(),
name=template_name
)
resource_url = resource.url()
preset_resources[resource_url] = template_name
# Sort by template name
export_templates = dict(sorted(preset_resources.items(),
key=lambda x: x[1]))
# Add default built-ins at the start
# TODO: find the built-ins automatically; scraped with https://gist.github.com/BigRoy/97150c7c6f0a0c916418207b9a2bc8f1 # noqa
result = {
"export-preset-generator://viewport2d": "2D View", # noqa
"export-preset-generator://doc-channel-normal-no-alpha": "Document channels + Normal + AO (No Alpha)", # noqa
"export-preset-generator://doc-channel-normal-with-alpha": "Document channels + Normal + AO (With Alpha)", # noqa
"export-preset-generator://sketchfab": "Sketchfab", # noqa
"export-preset-generator://adobe-standard-material": "Substance 3D Stager", # noqa
"export-preset-generator://usd": "USD PBR Metal Roughness", # noqa
"export-preset-generator://gltf": "glTF PBR Metal Roughness", # noqa
"export-preset-generator://gltf-displacement": "glTF PBR Metal Roughness + Displacement texture (experimental)" # noqa
}
result.update(export_templates)
return result
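# Illustrative usage: the mapping maps resource URL to a human readable label,
# e.g. suitable for populating an enum of output template choices:
#
#   >>> presets = get_export_presets()
#   >>> presets["export-preset-generator://usd"]
#   'USD PBR Metal Roughness'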
def _convert_stack_path_to_cmd_str(stack_path):
"""Convert stack path `str` or `[str, str]` for javascript query
Example usage:
>>> stack_path = _convert_stack_path_to_cmd_str(stack_path)
>>> cmd = f"alg.mapexport.channelIdentifiers({stack_path})"
>>> substance_painter.js.evaluate(cmd)
Args:
stack_path (list or str): Path to the stack, could be
"Texture set name" or ["Texture set name", "Stack name"]
Returns:
str: Stack path usable as argument in javascript query.
"""
return json.dumps(stack_path)
def get_channel_identifiers(stack_path=None):
"""Return the list of channel identifiers.
If a context is passed (texture set/stack),
return only used channels with resolved user channels.
Channel identifiers are:
basecolor, height, specular, opacity, emissive, displacement,
glossiness, roughness, anisotropylevel, anisotropyangle, transmissive,
scattering, reflection, ior, metallic, normal, ambientOcclusion,
diffuse, specularlevel, blendingmask, [custom user names].
Args:
stack_path (list or str, Optional): Path to the stack, could be
"Texture set name" or ["Texture set name", "Stack name"]
Returns:
list: List of channel identifiers.
"""
if stack_path is None:
stack_path = ""
else:
stack_path = _convert_stack_path_to_cmd_str(stack_path)
cmd = f"alg.mapexport.channelIdentifiers({stack_path})"
return substance_painter.js.evaluate(cmd)
def get_channel_format(stack_path, channel):
"""Retrieve the channel format of a specific stack channel.
See `alg.mapexport.channelFormat` (javascript API) for more details.
The channel format data is:
"label" (str): The channel format label: could be one of
[sRGB8, L8, RGB8, L16, RGB16, L16F, RGB16F, L32F, RGB32F]
"color" (bool): True if the format is in color, False is grayscale
"floating" (bool): True if the format uses floating point
representation, false otherwise
"bitDepth" (int): Bit per color channel (could be 8, 16 or 32 bpc)
Arguments:
stack_path (list or str): Path to the stack, could be
"Texture set name" or ["Texture set name", "Stack name"]
channel (str): Identifier of the channel to export
(see `get_channel_identifiers`)
Returns:
dict: The channel format data.
"""
stack_path = _convert_stack_path_to_cmd_str(stack_path)
cmd = f"alg.mapexport.channelFormat({stack_path}, '{channel}')"
return substance_painter.js.evaluate(cmd)
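# Illustrative usage (stack path and channel are hypothetical):
#
#   >>> get_channel_format("DefaultMaterial", "basecolor")
#   {'label': 'sRGB8', 'color': True, 'floating': False, 'bitDepth': 8}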
def get_document_structure():
"""Dump the document structure.
See `alg.mapexport.documentStructure` (javascript API) for more details.
Returns:
dict: Document structure or None when no project is open
"""
return substance_painter.js.evaluate("alg.mapexport.documentStructure()")
def get_export_templates(config, format="png", strip_folder=True):
"""Return export config outputs.
This uses the Javascript API `alg.mapexport.getPathsExportDocumentMaps`
which returns a different output than using the Python equivalent
`substance_painter.export.list_project_textures(config)`.
The nice thing about the Javascript API version is that it returns the
output textures grouped by filename template.
A downside is that it doesn't return all the UDIM tiles but per template
always returns a single file.
Note:
The file format needs to be explicitly passed to the Javascript API
but upon exporting through the Python API the file format can be based
on the output preset, so the file extension is likely to mismatch the
actual output.
Warning:
Even though the function appears to solely get the expected outputs
the Javascript API will actually create the config's texture output
folder if it does not exist yet. As such, a valid path must be set.
Example output:
{
"DefaultMaterial": {
"$textureSet_BaseColor(_$colorSpace)(.$udim)": "DefaultMaterial_BaseColor_ACES - ACEScg.1002.png", # noqa
"$textureSet_Emissive(_$colorSpace)(.$udim)": "DefaultMaterial_Emissive_ACES - ACEScg.1002.png", # noqa
"$textureSet_Height(_$colorSpace)(.$udim)": "DefaultMaterial_Height_Utility - Raw.1002.png", # noqa
"$textureSet_Metallic(_$colorSpace)(.$udim)": "DefaultMaterial_Metallic_Utility - Raw.1002.png", # noqa
"$textureSet_Normal(_$colorSpace)(.$udim)": "DefaultMaterial_Normal_Utility - Raw.1002.png", # noqa
"$textureSet_Roughness(_$colorSpace)(.$udim)": "DefaultMaterial_Roughness_Utility - Raw.1002.png" # noqa
}
}
Arguments:
config (dict): Export config
format (str, Optional): Output format to write to, defaults to 'png'
strip_folder (bool, Optional): Whether to strip the output folder
from the output filenames.
Returns:
dict: The expected output maps.
"""
folder = config["exportPath"].replace("\\", "/")
preset = config["defaultExportPreset"]
cmd = f'alg.mapexport.getPathsExportDocumentMaps("{preset}", "{folder}", "{format}")' # noqa
result = substance_painter.js.evaluate(cmd)
if strip_folder:
for _stack, maps in result.items():
for map_template, map_filepath in maps.items():
map_filepath = map_filepath.replace("\\", "/")
assert map_filepath.startswith(folder)
map_filename = map_filepath[len(folder):].lstrip("/")
maps[map_template] = map_filename
return result
def _templates_to_regex(templates,
texture_set,
colorspaces,
project,
mesh):
"""Return regex based on a Substance Painter expot filename template.
This converts Substance Painter export filename templates like
`$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)` into a regex
which can be used to query an output filename to help retrieve:
- Which template filename the file belongs to.
- Which color space the file is written with.
- Which udim tile it is exactly.
This is used by `get_parsed_export_maps` which tries to match the filename
pattern against the known possible outputs as explicitly as possible.
That's why the Texture Set name, color spaces, project path and mesh path
must be provided. By doing so we get the best shot at correctly matching
the right template, because otherwise $textureSet could basically be any
string and thus match even that of a color space or mesh.
Arguments:
templates (list): List of templates to convert to regex.
texture_set (str): The texture set to match against.
colorspaces (list): The colorspaces defined in the current project.
project (str): Filepath of current substance project.
mesh (str): Path to mesh file used in current project.
Returns:
dict: {template: template regex pattern}
"""
def _filename_no_ext(path):
return os.path.splitext(os.path.basename(path))[0]
if colorspaces and any(colorspaces):
colorspace_match = "|".join(re.escape(c) for c in set(colorspaces))
colorspace_match = f"({colorspace_match})"
else:
# No colorspace support enabled
colorspace_match = ""
# Key to regex valid search values
key_matches = {
"$project": re.escape(_filename_no_ext(project)),
"$mesh": re.escape(_filename_no_ext(mesh)),
"$textureSet": re.escape(texture_set),
"$colorSpace": colorspace_match,
"$udim": "([0-9]{4})"
}
# Turn the templates into regexes
regexes = {}
for template in templates:
# We need to turn each template into a regex pattern.
search_regex = re.escape(template)
# Let's assume that any ( and ) character in the file template was
# intended as an optional template key and do a simple `str.replace`
# Note: we are matching against re.escape(template) so will need to
# search for the escaped brackets.
search_regex = search_regex.replace(re.escape("("), "(")
search_regex = search_regex.replace(re.escape(")"), ")?")
# Substitute each key into a named group
for key, key_expected_regex in key_matches.items():
# We want to use the template as a regex basis in the end so will
# escape the whole thing first. Note that thus we'll need to
# search for the escaped versions of the keys too.
escaped_key = re.escape(key)
key_label = key[1:] # key without $ prefix
key_expected_grp_regex = f"(?P<{key_label}>{key_expected_regex})"
search_regex = search_regex.replace(escaped_key,
key_expected_grp_regex)
# The filename templates don't include the extension so we add it
# to be able to match the output filename from beginning to end
ext_regex = r"(?P<ext>\.[A-Za-z][A-Za-z0-9-]*)"
search_regex = rf"^{search_regex}{ext_regex}$"
regexes[template] = search_regex
return regexes
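# Illustrative usage (all values hypothetical): the returned pattern can be
# used to parse an exported filename back into its template tokens, e.g.:
#
#   >>> regexes = _templates_to_regex(
#   ...     ["$textureSet_BaseColor(_$colorSpace)(.$udim)"],
#   ...     texture_set="DefaultMaterial",
#   ...     colorspaces={"ACES - ACEScg", "Utility - Raw"},
#   ...     project="/path/to/project.spp",
#   ...     mesh="/path/to/mesh.fbx")
#   >>> pattern = next(iter(regexes.values()))
#   >>> match = re.match(pattern, "DefaultMaterial_BaseColor_ACES - ACEScg.1001.png")
#   >>> match.group("textureSet"), match.group("colorSpace"), match.group("udim")
#   ('DefaultMaterial', 'ACES - ACEScg', '1001')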
def strip_template(template, strip="._ "):
"""Return static characters in a substance painter filename template.
>>> strip_template("$textureSet_HELLO(.$udim)")
# HELLO
>>> strip_template("$mesh_$textureSet_HELLO_WORLD_$colorSpace(.$udim)")
# HELLO_WORLD
>>> strip_template("$textureSet_HELLO(.$udim)", strip=None)
# _HELLO
>>> strip_template("$mesh_$textureSet_$colorSpace(.$udim)", strip=None)
# __
Arguments:
template (str): Filename template to strip.
strip (str, optional): Characters to strip from beginning and end
of the static string in template. Defaults to: `._ `.
Returns:
str: The static string in filename template.
"""
# Return only the characters of the template that are static.
# Remove all keys
keys = ["$project", "$mesh", "$textureSet", "$udim", "$colorSpace"]
stripped_template = template
for key in keys:
stripped_template = stripped_template.replace(key, "")
# Everything inside an optional bracket space is excluded since it's not
# static. We keep a counter to track whether we are currently iterating
# over parts of the template that are inside an 'optional' group or not.
counter = 0
result = ""
for char in stripped_template:
if char == "(":
counter += 1
elif char == ")":
counter -= 1
if counter < 0:
counter = 0
else:
if counter == 0:
result += char
if strip:
# Strip off any leading/trailing characters. Technically these are
# static but usually start and end separators like space or underscore
# aren't wanted.
result = result.strip(strip)
return result
def get_parsed_export_maps(config):
"""Return Export Config's expected output textures with parsed data.
This tries to parse the texture outputs using a Python API export config.
Parses template keys: $project, $mesh, $textureSet, $colorSpace, $udim
Example:
{("DefaultMaterial", ""): {
"$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)": [
{
// OUTPUT DATA FOR FILE #1 OF THE TEMPLATE
},
{
// OUTPUT DATA FOR FILE #2 OF THE TEMPLATE
},
]
},
}
File output data (all outputs are `str`).
1) Parsed tokens: These are parsed tokens from the template, they will
only exist if found in the filename template and output filename.
project: Workfile filename without extension
mesh: Filename of the loaded mesh without extension
textureSet: The texture set, e.g. "DefaultMaterial",
colorSpace: The color space, e.g. "ACES - ACEScg",
udim: The udim tile, e.g. "1001"
2) Template output and filepath
filepath: Full path to the resulting texture map, e.g.
"/path/to/mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png",
output: "mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png"
Note: if template had slashes (folders) then `output` will too.
So `output` might include a folder.
Returns:
dict: {(texture_set, stack): {template: [file1_data, file2_data]}}
"""
# Import is here to avoid recursive lib <-> colorspace imports
from .colorspace import get_project_channel_data
outputs = substance_painter.export.list_project_textures(config)
templates = get_export_templates(config, strip_folder=False)
# Get all color spaces set for the current project
project_colorspaces = set(
data["colorSpace"] for data in get_project_channel_data().values()
)
# Get current project mesh path and project path to explicitly match
# the $mesh and $project tokens
project_mesh_path = substance_painter.project.last_imported_mesh_path()
project_path = substance_painter.project.file_path()
# Get the current export path to strip this of the beginning of filepath
# results, since filename templates don't have these we'll match without
# that part of the filename.
export_path = config["exportPath"]
export_path = export_path.replace("\\", "/")
if not export_path.endswith("/"):
export_path += "/"
# Parse the outputs
result = {}
for key, filepaths in outputs.items():
texture_set, stack = key
if stack:
stack_path = f"{texture_set}/{stack}"
else:
stack_path = texture_set
stack_templates = list(templates[stack_path].keys())
template_regex = _templates_to_regex(stack_templates,
texture_set=texture_set,
colorspaces=project_colorspaces,
mesh=project_mesh_path,
project=project_path)
# Let's precompile the regexes
for template, regex in template_regex.items():
template_regex[template] = re.compile(regex)
stack_results = defaultdict(list)
for filepath in sorted(filepaths):
# We strip explicitly using the full parent export path instead of
# using `os.path.basename` because export template is allowed to
# have subfolders in its template which we want to match against
filepath = filepath.replace("\\", "/")
assert filepath.startswith(export_path), (
f"Filepath {filepath} must start with folder {export_path}"
)
filename = filepath[len(export_path):]
for template, regex in template_regex.items():
match = regex.match(filename)
if match:
parsed = match.groupdict(default={})
# Include some special outputs for convenience
parsed["filepath"] = filepath
parsed["output"] = filename
stack_results[template].append(parsed)
break
else:
raise ValueError(f"Unable to match {filename} against any "
f"template in: {list(template_regex.keys())}")
result[key] = dict(stack_results)
return result
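# Illustrative usage (config is a hypothetical export config dict):
#
#   >>> parsed = get_parsed_export_maps(config)
#   >>> for (texture_set, stack), template_maps in parsed.items():
#   ...     for template, outputs in template_maps.items():
#   ...         for output in outputs:
#   ...             print(output["filepath"], output.get("udim"))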
def load_shelf(path, name=None):
"""Add shelf to substance painter (for current application session)
This will dynamically add a Shelf for the current session. It's good
to note however that these will *not* persist on restart of the host.
Note:
Consider the loaded shelf a static library of resources.
The shelf will *not* be visible in application preferences in
Edit > Settings > Libraries.
The shelf will *not* show in the Assets browser if it has no existing
assets.
The shelf will *not* be selectable as a destination to import
resources to.
"""
# Ensure expanded path with forward slashes
path = os.path.expandvars(path)
path = os.path.abspath(path)
path = path.replace("\\", "/")
# Path must exist
if not os.path.isdir(path):
raise ValueError(f"Path is not an existing folder: {path}")
# This name must be unique and must only contain lowercase letters,
# numbers, underscores or hyphens.
if name is None:
name = os.path.basename(path)
name = name.lower()
name = re.sub(r"[^a-z0-9_\-]", "_", name) # sanitize to underscores
if substance_painter.resource.Shelves.exists(name):
shelf = next(
shelf for shelf in substance_painter.resource.Shelves.all()
if shelf.name() == name
)
if os.path.normpath(shelf.path()) != os.path.normpath(path):
raise ValueError(f"Shelf with name '{name}' already exists "
f"for a different path: '{shelf.path()}")
return
print(f"Adding Shelf '{name}' to path: {path}")
substance_painter.resource.Shelves.add(name, path)
return name
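# Illustrative usage (path is hypothetical): register a folder of resources
# as a session-only shelf; the name gets sanitized to lowercase/underscores:
#
#   >>> load_shelf("P:/projects/example/substance_shelf", name="Project Shelf")
#   'project_shelf'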
def _get_new_project_action():
"""Return QAction which triggers Substance Painter's new project dialog"""
main_window = substance_painter.ui.get_main_window()
# Find the file menu's New file action
menubar = main_window.menuBar()
new_action = None
for action in menubar.actions():
menu = action.menu()
if not menu:
continue
if menu.objectName() != "file":
continue
# Find the action with the CTRL+N key sequence
new_action = next(action for action in menu.actions()
if action.shortcut() == QtGui.QKeySequence.New)
break
return new_action
def prompt_new_file_with_mesh(mesh_filepath):
"""Prompts the user for a new file using Substance Painter's own dialog.
This will set the mesh path to load to the given mesh and disables the
mesh selection button so the user cannot change the path. This way we can
allow user configuration of a project but set the mesh path ourselves.
Warning:
This is very hacky and experimental.
Note:
If a project is currently open that uses the same mesh filepath, this
can't accurately detect whether the user actually accepted the new project
dialog or whether the project afterwards is still the original project,
for example when the user cancelled the operation.
"""
app = QtWidgets.QApplication.instance()
assert os.path.isfile(mesh_filepath), \
f"Mesh filepath does not exist: {mesh_filepath}"
def _setup_file_dialog():
"""Set filepath in QFileDialog and trigger accept result"""
file_dialog = app.activeModalWidget()
assert isinstance(file_dialog, QtWidgets.QFileDialog)
# Quickly hide the dialog
file_dialog.hide()
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 1000)
file_dialog.setDirectory(os.path.dirname(mesh_filepath))
url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath))
file_dialog.selectUrl(url)
# TODO: find a way to improve the process event to
# load more complicated mesh
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 3000)
file_dialog.done(file_dialog.Accepted)
app.processEvents(QtCore.QEventLoop.AllEvents)
def _setup_prompt():
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents)
dialog = app.activeModalWidget()
assert dialog.objectName() == "NewProjectDialog"
# Set the window title
mesh = os.path.basename(mesh_filepath)
dialog.setWindowTitle(f"New Project with mesh: {mesh}")
# Get the select mesh file button
mesh_select = dialog.findChild(QtWidgets.QPushButton, "meshSelect")
# Hide the select mesh button from the user to block changing of the mesh
mesh_select.setVisible(False)
# Ensure UI is visually up-to-date
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 8000)
# Trigger the 'select file' dialog to set the path and have the
# new file dialog to use the path.
QtCore.QTimer.singleShot(10, _setup_file_dialog)
mesh_select.click()
app.processEvents(QtCore.QEventLoop.AllEvents, 5000)
mesh_filename = dialog.findChild(QtWidgets.QFrame, "meshFileName")
mesh_filename_label = mesh_filename.findChild(QtWidgets.QLabel)
if not mesh_filename_label.text():
dialog.close()
substance_painter.logging.warning(
"Failed to set mesh path with the prompt dialog:"
f"{mesh_filepath}\n\n"
"Creating new project directly with the mesh path instead.")
new_action = _get_new_project_action()
if not new_action:
raise RuntimeError("Unable to detect new file action..")
QtCore.QTimer.singleShot(0, _setup_prompt)
new_action.trigger()
app.processEvents(QtCore.QEventLoop.AllEvents, 5000)
if not substance_painter.project.is_open():
return
# Confirm mesh was set as expected
project_mesh = substance_painter.project.last_imported_mesh_path()
if os.path.normpath(project_mesh) != os.path.normpath(mesh_filepath):
return
return project_mesh
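# Illustrative usage (path is hypothetical): opens Substance Painter's own
# New Project dialog with the mesh pre-selected and returns the project's
# mesh path on success, or None when the user cancelled or the mesh was not
# applied:
#
#   >>> prompt_new_file_with_mesh("P:/projects/example/assets/hero.fbx")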


@@ -0,0 +1,36 @@
def cleanup_openpype_qt_widgets():
"""
Workaround for Substance failing to shut down correctly
when a Qt window was still open at the time of shutting down.
This seems to work sometimes, but not all the time.
"""
# TODO: Create a more reliable method to close down all OpenPype Qt widgets
from PySide2 import QtWidgets
import substance_painter.ui
# Kill OpenPype Qt widgets
print("Killing OpenPype Qt widgets..")
for widget in QtWidgets.QApplication.topLevelWidgets():
if widget.__module__.startswith("openpype."):
print(f"Deleting widget: {widget.__class__.__name__}")
substance_painter.ui.delete_ui_element(widget)
def start_plugin():
from ayon_core.pipeline import install_host
from ayon_core.hosts.substancepainter.api import SubstanceHost
install_host(SubstanceHost())
def close_plugin():
from ayon_core.pipeline import uninstall_host
cleanup_openpype_qt_widgets()
uninstall_host()
if __name__ == "__main__":
start_plugin()


@@ -0,0 +1,43 @@
"""Ease the OpenPype on-boarding process by loading the plug-in on first run"""
OPENPYPE_PLUGIN_NAME = "openpype_plugin"
def start_plugin():
try:
# This isn't exposed in the official API so we keep it in a try-except
from painter_plugins_ui import (
get_settings,
LAUNCH_AT_START_KEY,
ON_STATE,
PLUGINS_MENU,
plugin_manager
)
# The `painter_plugins_ui` plug-in itself is also a startup plug-in, so
# we need to take into account that it could run either earlier or
# later than this startup script; we check whether its menu has initialized.
is_before_plugins_menu = PLUGINS_MENU is None
settings = get_settings(OPENPYPE_PLUGIN_NAME)
if settings.value(LAUNCH_AT_START_KEY, None) is None:
print("Initializing OpenPype plug-in on first run...")
if is_before_plugins_menu:
print("- running before 'painter_plugins_ui'")
# Delay the launch to the painter_plugins_ui initialization
settings.setValue(LAUNCH_AT_START_KEY, ON_STATE)
else:
# Launch now
print("- running after 'painter_plugins_ui'")
plugin_manager(OPENPYPE_PLUGIN_NAME)(True)
# Set the checked state in the menu to avoid confusion
action = next((action for action in PLUGINS_MENU._menu.actions()
if action.text() == OPENPYPE_PLUGIN_NAME), None)
if action is not None:
action.blockSignals(True)
action.setChecked(True)
action.blockSignals(False)
except Exception as exc:
print(exc)


@@ -0,0 +1,175 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating textures."""
from ayon_core.pipeline import CreatedInstance, Creator, CreatorError
from ayon_core.lib import (
EnumDef,
UILabelDef,
NumberDef,
BoolDef
)
from ayon_core.hosts.substancepainter.api.pipeline import (
get_instances,
set_instance,
set_instances,
remove_instance
)
from ayon_core.hosts.substancepainter.api.lib import get_export_presets
import substance_painter.project
class CreateTextures(Creator):
"""Create a texture set."""
identifier = "io.openpype.creators.substancepainter.textureset"
label = "Textures"
product_type = "textureSet"
icon = "picture-o"
default_variant = "Main"
def create(self, product_name, instance_data, pre_create_data):
if not substance_painter.project.is_open():
raise CreatorError("Can't create a Texture Set instance without "
"an open project.")
# Transfer settings from pre create to instance
creator_attributes = instance_data.setdefault(
"creator_attributes", dict())
for key in [
"exportPresetUrl",
"exportFileFormat",
"exportSize",
"exportPadding",
"exportDilationDistance"
]:
if key in pre_create_data:
creator_attributes[key] = pre_create_data[key]
instance = self.create_instance_in_context(product_name,
instance_data)
set_instance(
instance_id=instance["instance_id"],
instance_data=instance.data_to_store()
)
def collect_instances(self):
for instance in get_instances():
if (instance.get("creator_identifier") == self.identifier or
instance.get("productType") == self.product_type):
self.create_instance_in_context_from_existing(instance)
def update_instances(self, update_list):
instance_data_by_id = {}
for instance, _changes in update_list:
# Persist the data
instance_id = instance.get("instance_id")
instance_data = instance.data_to_store()
instance_data_by_id[instance_id] = instance_data
set_instances(instance_data_by_id, update=True)
def remove_instances(self, instances):
for instance in instances:
remove_instance(instance["instance_id"])
self._remove_instance_from_context(instance)
# Helper methods (this might get moved into Creator class)
def create_instance_in_context(self, product_name, data):
instance = CreatedInstance(
self.product_type, product_name, data, self
)
self.create_context.creator_adds_instance(instance)
return instance
def create_instance_in_context_from_existing(self, data):
instance = CreatedInstance.from_existing(data, self)
self.create_context.creator_adds_instance(instance)
return instance
def get_instance_attr_defs(self):
return [
EnumDef("exportPresetUrl",
items=get_export_presets(),
label="Output Template"),
BoolDef("allowSkippedMaps",
label="Allow Skipped Output Maps",
tooltip="When enabled this allows the publish to ignore "
"output maps in the used output template if one "
"or more maps are skipped due to the required "
"channels not being present in the current file.",
default=True),
EnumDef("exportFileFormat",
items={
None: "Based on output template",
# TODO: Get available extensions from substance API
"bmp": "bmp",
"ico": "ico",
"jpeg": "jpeg",
"jng": "jng",
"pbm": "pbm",
"pgm": "pgm",
"png": "png",
"ppm": "ppm",
"tga": "targa",
"tif": "tiff",
"wap": "wap",
"wbmp": "wbmp",
"xpm": "xpm",
"gif": "gif",
"hdr": "hdr",
"exr": "exr",
"j2k": "j2k",
"jp2": "jp2",
"pfm": "pfm",
"webp": "webp",
# TODO: Unsure why jxr format fails to export
# "jxr": "jpeg-xr",
# TODO: File formats that combine the exported textures
# like psd are not correctly supported due to
# publishing only a single file
# "psd": "psd",
# "sbsar": "sbsar",
},
default=None,
label="File type"),
EnumDef("exportSize",
items={
None: "Based on each Texture Set's size",
# The key is size of the texture file in log2.
# (i.e. 10 means 2^10 = 1024)
7: "128",
8: "256",
9: "512",
10: "1024",
11: "2048",
12: "4096",
13: "8192"
},
default=None,
label="Size"),
EnumDef("exportPadding",
items={
"passthrough": "No padding (passthrough)",
"infinite": "Dilation infinite",
"transparent": "Dilation + transparent",
"color": "Dilation + default background color",
"diffusion": "Dilation + diffusion"
},
default="infinite",
label="Padding"),
NumberDef("exportDilationDistance",
minimum=0,
maximum=256,
decimals=0,
default=16,
label="Dilation Distance"),
UILabelDef("*only used with "
"'Dilation + <x>' padding"),
]
def get_pre_create_attr_defs(self):
# Use same attributes as for instance attributes
return self.get_instance_attr_defs()


@@ -0,0 +1,124 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
import ayon_api
from ayon_core.pipeline import CreatedInstance, AutoCreator
from ayon_core.hosts.substancepainter.api.pipeline import (
set_instances,
set_instance,
get_instances
)
import substance_painter.project
class CreateWorkfile(AutoCreator):
"""Workfile auto-creator."""
identifier = "io.openpype.creators.substancepainter.workfile"
label = "Workfile"
product_type = "workfile"
icon = "document"
default_variant = "Main"
def create(self):
if not substance_painter.project.is_open():
return
variant = self.default_variant
project_name = self.project_name
folder_path = self.create_context.get_current_folder_path()
task_name = self.create_context.get_current_task_name()
host_name = self.create_context.host_name
# Workfile instance should always exist and must only exist once.
# As such we'll first check if it already exists and is collected.
current_instance = next(
(
instance for instance in self.create_context.instances
if instance.creator_identifier == self.identifier
), None)
current_folder_path = None
if current_instance is not None:
current_folder_path = current_instance["folderPath"]
if current_instance is None:
self.log.info("Auto-creating workfile instance...")
folder_entity = ayon_api.get_folder_by_path(
project_name, folder_path
)
task_entity = ayon_api.get_task_by_name(
project_name, folder_entity["id"], task_name
)
product_name = self.get_product_name(
project_name,
folder_entity,
task_entity,
variant,
host_name,
)
data = {
"folderPath": folder_path,
"task": task_name,
"variant": variant
}
current_instance = self.create_instance_in_context(product_name,
data)
elif (
current_folder_path != folder_path
or current_instance["task"] != task_name
):
# Update instance context if is not the same
folder_entity = ayon_api.get_folder_by_path(
project_name, folder_path
)
task_entity = ayon_api.get_task_by_name(
project_name, folder_entity["id"], task_name
)
product_name = self.get_product_name(
project_name,
folder_entity,
task_entity,
variant,
host_name,
)
current_instance["folderPath"] = folder_path
current_instance["task"] = task_name
current_instance["productName"] = product_name
set_instance(
instance_id=current_instance.get("instance_id"),
instance_data=current_instance.data_to_store()
)
def collect_instances(self):
for instance in get_instances():
if (instance.get("creator_identifier") == self.identifier or
instance.get("productType") == self.product_type):
self.create_instance_in_context_from_existing(instance)
def update_instances(self, update_list):
instance_data_by_id = {}
for instance, _changes in update_list:
# Persist the data
instance_id = instance.get("instance_id")
instance_data = instance.data_to_store()
instance_data_by_id[instance_id] = instance_data
set_instances(instance_data_by_id, update=True)
# Helper methods (this might get moved into Creator class)
def create_instance_in_context(self, product_name, data):
instance = CreatedInstance(
self.product_type, product_name, data, self
)
self.create_context.creator_adds_instance(instance)
return instance
def create_instance_in_context_from_existing(self, data):
instance = CreatedInstance.from_existing(data, self)
self.create_context.creator_adds_instance(instance)
return instance


@@ -0,0 +1,246 @@
import copy
from qtpy import QtWidgets, QtCore
from ayon_core.pipeline import (
load,
get_representation_path,
)
from ayon_core.pipeline.load import LoadError
from ayon_core.hosts.substancepainter.api.pipeline import (
imprint_container,
set_container_metadata,
remove_container_metadata
)
import substance_painter.project
def _convert(substance_attr):
"""Return Substance Painter Python API Project attribute from string.
This converts a string like "ProjectWorkflow.Default" to its Substance
Painter Python API equivalent object, for example:
`substance_painter.project.ProjectWorkflow.Default`
Args:
substance_attr (str): The `substance_painter.project` attribute,
for example "ProjectWorkflow.Default"
Returns:
Any: Substance Python API object of the project attribute.
Raises:
ValueError: If attribute does not exist on the
`substance_painter.project` python api.
"""
root = substance_painter.project
for attr in substance_attr.split("."):
root = getattr(root, attr, None)
if root is None:
raise ValueError(
"Substance Painter project attribute"
f" does not exist: {substance_attr}")
return root
def get_template_by_name(name: str, templates: list[dict]) -> dict:
return next(
template for template in templates
if template["name"] == name
)
class SubstanceProjectConfigurationWindow(QtWidgets.QDialog):
"""The pop-up dialog allows users to choose material
duplicate options for importing Max objects when updating
or switching assets.
"""
def __init__(self, project_templates):
super(SubstanceProjectConfigurationWindow, self).__init__()
self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)
self.configuration = None
self.template_names = [template["name"] for template
in project_templates]
self.project_templates = project_templates
self.widgets = {
"label": QtWidgets.QLabel(
"Select your template for project configuration"),
"template_options": QtWidgets.QComboBox(),
"import_cameras": QtWidgets.QCheckBox("Import Cameras"),
"preserve_strokes": QtWidgets.QCheckBox("Preserve Strokes"),
"clickbox": QtWidgets.QWidget(),
"combobox": QtWidgets.QWidget(),
"buttons": QtWidgets.QDialogButtonBox(
QtWidgets.QDialogButtonBox.Ok
| QtWidgets.QDialogButtonBox.Cancel)
}
self.widgets["template_options"].addItems(self.template_names)
template_name = self.widgets["template_options"].currentText()
self._update_to_match_template(template_name)
# Build clickboxes
layout = QtWidgets.QHBoxLayout(self.widgets["clickbox"])
layout.addWidget(self.widgets["import_cameras"])
layout.addWidget(self.widgets["preserve_strokes"])
# Build combobox
layout = QtWidgets.QHBoxLayout(self.widgets["combobox"])
layout.addWidget(self.widgets["template_options"])
# Build buttons
layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])
# Build layout.
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(self.widgets["label"])
layout.addWidget(self.widgets["combobox"])
layout.addWidget(self.widgets["clickbox"])
layout.addWidget(self.widgets["buttons"])
self.widgets["template_options"].currentTextChanged.connect(
self._update_to_match_template)
self.widgets["buttons"].accepted.connect(self.on_accept)
self.widgets["buttons"].rejected.connect(self.on_reject)
def on_accept(self):
self.configuration = self.get_project_configuration()
self.close()
def on_reject(self):
self.close()
def _update_to_match_template(self, template_name):
template = get_template_by_name(template_name, self.project_templates)
self.widgets["import_cameras"].setChecked(template["import_cameras"])
self.widgets["preserve_strokes"].setChecked(
template["preserve_strokes"])
def get_project_configuration(self):
templates = self.project_templates
template_name = self.widgets["template_options"].currentText()
template = get_template_by_name(template_name, templates)
template = copy.deepcopy(template) # do not edit the original
template["import_cameras"] = self.widgets["import_cameras"].isChecked()
template["preserve_strokes"] = (
self.widgets["preserve_strokes"].isChecked()
)
for key in ["normal_map_format",
"project_workflow",
"tangent_space_mode"]:
template[key] = _convert(template[key])
return template
@classmethod
def prompt(cls, templates):
dialog = cls(templates)
dialog.exec_()
configuration = dialog.configuration
dialog.deleteLater()
return configuration
class SubstanceLoadProjectMesh(load.LoaderPlugin):
"""Load mesh for project"""
product_types = {"*"}
representations = {"abc", "fbx", "obj", "gltf", "usd", "usda", "usdc"}
label = "Load mesh"
order = -10
icon = "code-fork"
color = "orange"
# Defined via settings
project_templates = []
def load(self, context, name, namespace, options=None):
# Get user inputs
result = SubstanceProjectConfigurationWindow.prompt(
self.project_templates)
if not result:
# cancelling loader action
return
if not substance_painter.project.is_open():
# Allow to 'initialize' a new project
path = self.filepath_from_context(context)
sp_settings = substance_painter.project.Settings(
import_cameras=result["import_cameras"],
normal_map_format=result["normal_map_format"],
project_workflow=result["project_workflow"],
tangent_space_mode=result["tangent_space_mode"],
default_texture_resolution=result["default_texture_resolution"]
)
settings = substance_painter.project.create(
mesh_file_path=path, settings=sp_settings
)
else:
# Reload the mesh
settings = substance_painter.project.MeshReloadingSettings(
import_cameras=result["import_cameras"],
preserve_strokes=result["preserve_strokes"])
def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa
if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa
self.log.info("Reload succeeded")
else:
raise LoadError("Reload of mesh failed")
path = self.filepath_from_context(context)
substance_painter.project.reload_mesh(path,
settings,
on_mesh_reload)
# Store container
container = {}
project_mesh_object_name = "_ProjectMesh_"
imprint_container(container,
name=project_mesh_object_name,
namespace=project_mesh_object_name,
context=context,
loader=self)
# We want to store some options for updating to keep consistent behavior
# from the user's original choice. We don't store 'preserve_strokes'
# as we always preserve strokes on updates.
container["options"] = {
"import_cameras": result["import_cameras"],
}
set_container_metadata(project_mesh_object_name, container)
def switch(self, container, context):
self.update(container, context)
def update(self, container, context):
repre_entity = context["representation"]
path = get_representation_path(repre_entity)
# Reload the mesh
container_options = container.get("options", {})
settings = substance_painter.project.MeshReloadingSettings(
import_cameras=container_options.get("import_cameras", True),
preserve_strokes=True
)
def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus):
if status == substance_painter.project.ReloadMeshStatus.SUCCESS:
self.log.info("Reload succeeded")
else:
raise LoadError("Reload of mesh failed")
substance_painter.project.reload_mesh(path, settings, on_mesh_reload)
# Update container representation
object_name = container["objectName"]
update_data = {"representation": repre_entity["id"]}
set_container_metadata(object_name, update_data, update=True)
def remove(self, container):
# Remove OpenPype related settings about what model was loaded
# or close the project?
# TODO: This is likely best 'hidden' away from the user because
# this will leave the project's mesh unmanaged.
remove_container_metadata(container["objectName"])


@@ -0,0 +1,17 @@
import pyblish.api
from ayon_core.pipeline import registered_host
class CollectCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.49
label = "Current Workfile"
hosts = ["substancepainter"]
def process(self, context):
host = registered_host()
path = host.get_current_workfile()
context.data["currentFile"] = path
self.log.debug(f"Current workfile: {path}")


@@ -0,0 +1,211 @@
import os
import copy
import pyblish.api
import ayon_api
import substance_painter.textureset
from ayon_core.pipeline import publish
from ayon_core.hosts.substancepainter.api.lib import (
get_parsed_export_maps,
strip_template
)
from ayon_core.pipeline.create import get_product_name
class CollectTextureSet(pyblish.api.InstancePlugin):
"""Extract Textures using an output template config"""
# TODO: Production-test usage of color spaces
# TODO: Detect what source data channels end up in each file
label = "Collect Texture Set images"
hosts = ["substancepainter"]
families = ["textureSet"]
order = pyblish.api.CollectorOrder
def process(self, instance):
config = self.get_export_config(instance)
project_name = instance.context.data["projectName"]
folder_entity = ayon_api.get_folder_by_path(
project_name,
instance.data["folderPath"]
)
task_name = instance.data.get("task")
task_entity = None
if folder_entity and task_name:
task_entity = ayon_api.get_task_by_name(
project_name, folder_entity["id"], task_name
)
instance.data["exportConfig"] = config
maps = get_parsed_export_maps(config)
# Let's break the instance into multiple instances to integrate
# a product per generated texture or texture UDIM sequence
for (texture_set_name, stack_name), template_maps in maps.items():
self.log.info(f"Processing {texture_set_name}/{stack_name}")
for template, outputs in template_maps.items():
self.log.info(f"Processing {template}")
self.create_image_instance(instance, template, outputs,
task_entity=task_entity,
texture_set_name=texture_set_name,
stack_name=stack_name)
def create_image_instance(self, instance, template, outputs,
task_entity, texture_set_name, stack_name):
"""Create a new instance per image or UDIM sequence.
The new instances will be of product type `image`.
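        Args:
            instance (pyblish.api.Instance): Source texture set instance.
            template (str): Output template the maps were exported from.
            outputs (list[dict]): Parsed outputs for the template, each with
                at least a "filepath" entry.
            task_entity (dict or None): Task entity, if it was found.
            texture_set_name (str): Name of the texture set.
            stack_name (str): Name of the stack within the texture set.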
"""
context = instance.context
first_filepath = outputs[0]["filepath"]
fnames = [os.path.basename(output["filepath"]) for output in outputs]
ext = os.path.splitext(first_filepath)[1]
assert ext.lstrip("."), f"No extension: {ext}"
        always_include_texture_set_name = False  # TODO: make this configurable
all_texture_sets = substance_painter.textureset.all_texture_sets()
texture_set = substance_painter.textureset.TextureSet.from_name(
texture_set_name
)
# Define the suffix we want to give this particular texture
# set and set up a remapped product naming for it.
suffix = ""
if always_include_texture_set_name or len(all_texture_sets) > 1:
# More than one texture set, include texture set name
suffix += f".{texture_set_name}"
if texture_set.is_layered_material() and stack_name:
# More than one stack, include stack name
suffix += f".{stack_name}"
# Always include the map identifier
map_identifier = strip_template(template)
suffix += f".{map_identifier}"
task_name = task_type = None
if task_entity:
task_name = task_entity["name"]
task_type = task_entity["taskType"]
image_product_name = get_product_name(
            # TODO: The product type isn't actually 'texture' currently; this
            #   is only done so the product name starts with 'texture'.
context.data["projectName"],
task_name,
task_type,
context.data["hostName"],
product_type="texture",
variant=instance.data["variant"] + suffix,
project_settings=context.data["project_settings"]
)
# Prepare representation
representation = {
"name": ext.lstrip("."),
"ext": ext.lstrip("."),
"files": fnames if len(fnames) > 1 else fnames[0],
}
# Mark as UDIM explicitly if it has UDIM tiles.
if bool(outputs[0].get("udim")):
# The representation for a UDIM sequence should have a `udim` key
# that is a list of all udim tiles (str) like: ["1001", "1002"]
# strings. See CollectTextures plug-in and Integrators.
representation["udim"] = [output["udim"] for output in outputs]
# Set up the representation for thumbnail generation
# TODO: Simplify this once thumbnail extraction is refactored
staging_dir = os.path.dirname(first_filepath)
representation["tags"] = ["review"]
representation["stagingDir"] = staging_dir
# Clone the instance
product_type = "image"
image_instance = context.create_instance(image_product_name)
image_instance[:] = instance[:]
image_instance.data.update(copy.deepcopy(dict(instance.data)))
image_instance.data["name"] = image_product_name
image_instance.data["label"] = image_product_name
image_instance.data["productName"] = image_product_name
image_instance.data["productType"] = product_type
image_instance.data["family"] = product_type
image_instance.data["families"] = [product_type, "textures"]
image_instance.data["representations"] = [representation]
# Group the textures together in the loader
image_instance.data["productGroup"] = image_product_name
# Store the texture set name and stack name on the instance
image_instance.data["textureSetName"] = texture_set_name
image_instance.data["textureStackName"] = stack_name
# Store color space with the instance
# Note: The extractor will assign it to the representation
colorspace = outputs[0].get("colorSpace")
if colorspace:
self.log.debug(f"{image_product_name} colorspace: {colorspace}")
image_instance.data["colorspace"] = colorspace
        # Attach the new image instance to the original instance as a member
instance.append(image_instance)
def get_export_config(self, instance):
"""Return an export configuration dict for texture exports.
This config can be supplied to:
- `substance_painter.export.export_project_textures`
- `substance_painter.export.list_project_textures`
        See the documentation of the substance_painter.export module for the
        formatting of the configuration dictionary.
Args:
instance (pyblish.api.Instance): Texture Set instance to be
published.
Returns:
dict: Export config
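        Example:
            A minimal config (shape only; values are illustrative, not
            defaults) could look like::

                {
                    "exportShaderParams": True,
                    "exportPath": "/path/to/staging/dir",
                    "defaultExportPreset": "<export preset url>",
                    "exportList": [{"rootPath": "DefaultMaterial"}],
                    "exportParameters": [
                        {"parameters": {"fileFormat": "png",
                                        "sizeLog2": [12, 12]}}
                    ],
                }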
"""
creator_attrs = instance.data["creator_attributes"]
preset_url = creator_attrs["exportPresetUrl"]
self.log.debug(f"Exporting using preset: {preset_url}")
# See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa
config = { # noqa
"exportShaderParams": True,
"exportPath": publish.get_instance_staging_dir(instance),
"defaultExportPreset": preset_url,
# Custom overrides to the exporter
"exportParameters": [
{
"parameters": {
"fileFormat": creator_attrs["exportFileFormat"],
"sizeLog2": creator_attrs["exportSize"],
"paddingAlgorithm": creator_attrs["exportPadding"],
"dilationDistance": creator_attrs["exportDilationDistance"] # noqa
}
}
]
}
# Create the list of Texture Sets to export.
config["exportList"] = []
for texture_set in substance_painter.textureset.all_texture_sets():
config["exportList"].append({"rootPath": texture_set.name()})
        # Treat None values from the creator attributes as optional and
        # remove them
for override in config["exportParameters"]:
parameters = override.get("parameters")
for key, value in dict(parameters).items():
if value is None:
parameters.pop(key)
return config

View file

@@ -0,0 +1,26 @@
import os
import pyblish.api
class CollectWorkfileRepresentation(pyblish.api.InstancePlugin):
"""Create a publish representation for the current workfile instance."""
order = pyblish.api.CollectorOrder
label = "Workfile representation"
hosts = ["substancepainter"]
families = ["workfile"]
def process(self, instance):
context = instance.context
current_file = context.data["currentFile"]
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)
instance.data["representations"] = [{
"name": ext.lstrip("."),
"ext": ext.lstrip("."),
"files": file,
"stagingDir": folder,
}]

View file

@@ -0,0 +1,62 @@
import substance_painter.export
from ayon_core.pipeline import KnownPublishError, publish
class ExtractTextures(publish.Extractor,
publish.ColormanagedPyblishPluginMixin):
"""Extract Textures using an output template config.
    Note:
        This Extractor assumes that `collect_textureset_images` has prepared
        the relevant export config and has also collected the individual
        image instances for publishing, including their representations.
        That is why this particular Extractor doesn't specify any
        representations to integrate itself.
"""
label = "Extract Texture Set"
hosts = ["substancepainter"]
families = ["textureSet"]
# Run before thumbnail extractors
order = publish.Extractor.order - 0.1
def process(self, instance):
config = instance.data["exportConfig"]
result = substance_painter.export.export_project_textures(config)
if result.status != substance_painter.export.ExportStatus.Success:
raise KnownPublishError(
"Failed to export texture set: {}".format(result.message)
)
# Log what files we generated
for (texture_set_name, stack_name), maps in result.textures.items():
# Log our texture outputs
self.log.info(f"Exported stack: {texture_set_name} {stack_name}")
for texture_map in maps:
self.log.info(f"Exported texture: {texture_map}")
        # We'll insert the color space data for each image instance that we
        # added to this texture set. The collector couldn't do so because
        # anatomy and other instance data still needed to be collected first.
context = instance.context
for image_instance in instance:
representation = next(iter(image_instance.data["representations"]))
colorspace = image_instance.data.get("colorspace")
if not colorspace:
self.log.debug("No color space data present for instance: "
f"{image_instance}")
continue
self.set_representation_colorspace(representation,
context=context,
colorspace=colorspace)
        # The TextureSet instance itself should not be integrated; it
        # generates no output data. Instead, the separate texture instances
        # generated from it are integrated into the database.
instance.data["integrate"] = False

View file

@@ -0,0 +1,23 @@
import pyblish.api
from ayon_core.lib import version_up
from ayon_core.pipeline import registered_host
class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
"""Increment current workfile version."""
order = pyblish.api.IntegratorOrder + 1
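    # IntegratorOrder + 1 runs this after integration has completed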
label = "Increment Workfile Version"
optional = True
hosts = ["substancepainter"]
def process(self, context):
        assert all(result["success"] for result in context.data["results"]), (
            "Publishing was not successful, so the version is not increased.")
host = registered_host()
path = context.data["currentFile"]
        new_path = version_up(path)
        self.log.info(f"Incrementing current workfile to: {new_path}")
        host.save_workfile(new_path)

View file

@@ -0,0 +1,28 @@
import pyblish.api
from ayon_core.pipeline import (
registered_host,
KnownPublishError
)
class SaveCurrentWorkfile(pyblish.api.ContextPlugin):
"""Save current workfile"""
label = "Save current workfile"
order = pyblish.api.ExtractorOrder - 0.49
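    # Runs just before extraction so the workfile on disk is up to date
    # for anything extracted from it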
hosts = ["substancepainter"]
def process(self, context):
host = registered_host()
current = host.get_current_workfile()
if context.data["currentFile"] != current:
raise KnownPublishError("Workfile has changed during publishing!")
if host.workfile_has_unsaved_changes():
self.log.info("Saving current file: {}".format(current))
host.save_workfile()
else:
self.log.debug("Skipping workfile save because there are no "
"unsaved changes.")

View file

@@ -0,0 +1,110 @@
import copy
import os
import pyblish.api
import substance_painter.export
from ayon_core.pipeline import PublishValidationError
class ValidateOutputMaps(pyblish.api.InstancePlugin):
"""Validate all output maps for Output Template are generated.
Output maps will be skipped by Substance Painter if it is an output
map in the Substance Output Template which uses channels that the current
substance painter project has not painted or generated.
"""
order = pyblish.api.ValidatorOrder
label = "Validate output maps"
hosts = ["substancepainter"]
families = ["textureSet"]
def process(self, instance):
config = instance.data["exportConfig"]
        # The Substance Painter API does not allow querying the output maps
        # it will generate without actually exporting the files, so we
        # generate the smallest and fastest export possible.
config = copy.deepcopy(config)
parameters = config["exportParameters"][0]["parameters"]
parameters["sizeLog2"] = [1, 1] # output 2x2 images (smallest)
parameters["paddingAlgorithm"] = "passthrough" # no dilation (faster)
parameters["dithering"] = False # no dithering (faster)
result = substance_painter.export.export_project_textures(config)
if result.status != substance_painter.export.ExportStatus.Success:
raise PublishValidationError(
"Failed to export texture set: {}".format(result.message)
)
generated_files = set()
for texture_maps in result.textures.values():
for texture_map in texture_maps:
generated_files.add(os.path.normpath(texture_map))
# Directly clean up our temporary export
os.remove(texture_map)
creator_attributes = instance.data.get("creator_attributes", {})
allow_skipped_maps = creator_attributes.get("allowSkippedMaps", True)
error_report_missing = []
for image_instance in instance:
# Confirm whether the instance has its expected files generated.
# We assume there's just one representation and that it is
# the actual texture representation from the collector.
representation = next(iter(image_instance.data["representations"]))
staging_dir = representation["stagingDir"]
filenames = representation["files"]
if not isinstance(filenames, (list, tuple)):
# Convert single file to list
filenames = [filenames]
missing = []
for filename in filenames:
filepath = os.path.join(staging_dir, filename)
filepath = os.path.normpath(filepath)
if filepath not in generated_files:
self.log.warning(f"Missing texture: {filepath}")
missing.append(filepath)
if not missing:
continue
if allow_skipped_maps:
                # TODO: This changes state on the instances, which should
                #   not be done during validation.
self.log.warning(f"Disabling texture instance: "
f"{image_instance}")
image_instance.data["active"] = False
image_instance.data["publish"] = False
image_instance.data["integrate"] = False
representation.setdefault("tags", []).append("delete")
continue
else:
error_report_missing.append((image_instance, missing))
if error_report_missing:
message = (
"The Texture Set skipped exporting some output maps which are "
"defined in the Output Template. This happens if the Output "
"Templates exports maps from channels which you do not "
"have in your current Substance Painter project.\n\n"
"To allow this enable the *Allow Skipped Output Maps* setting "
"on the instance.\n\n"
f"Instance {instance} skipped exporting output maps:\n"
""
)
for image_instance, missing in error_report_missing:
missing_str = ", ".join(missing)
message += f"- **{image_instance}** skipped: {missing_str}\n"
raise PublishValidationError(
message=message,
title="Missing output maps"
)