mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
[Automated] Merged develop into main
This commit is contained in:
commit
66fadc0a0d
80 changed files with 3790 additions and 233 deletions
6
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
6
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -35,6 +35,9 @@ body:
|
|||
label: Version
|
||||
description: What version are you running? Look to OpenPype Tray
|
||||
options:
|
||||
- 3.15.6-nightly.3
|
||||
- 3.15.6-nightly.2
|
||||
- 3.15.6-nightly.1
|
||||
- 3.15.5
|
||||
- 3.15.5-nightly.2
|
||||
- 3.15.5-nightly.1
|
||||
|
|
@ -132,9 +135,6 @@ body:
|
|||
- 3.14.0
|
||||
- 3.14.0-nightly.1
|
||||
- 3.13.1-nightly.3
|
||||
- 3.13.1-nightly.2
|
||||
- 3.13.1-nightly.1
|
||||
- 3.13.0
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
|
|
|
|||
10
.github/workflows/update_bug_report.yml
vendored
10
.github/workflows/update_bug_report.yml
vendored
|
|
@ -18,10 +18,16 @@ jobs:
|
|||
uses: ynput/gha-populate-form-version@main
|
||||
with:
|
||||
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
|
||||
github_user: ${{ secrets.CI_USER }}
|
||||
github_email: ${{ secrets.CI_EMAIL }}
|
||||
registry: github
|
||||
dropdown: _version
|
||||
limit_to: 100
|
||||
form: .github/ISSUE_TEMPLATE/bug_report.yml
|
||||
commit_message: 'chore(): update bug report / version'
|
||||
dry_run: no-push
|
||||
|
||||
- name: Push to protected develop branch
|
||||
uses: CasperWA/push-protected@v2.10.0
|
||||
with:
|
||||
token: ${{ secrets.YNPUT_BOT_TOKEN }}
|
||||
branch: develop
|
||||
unprotect_reviews: true
|
||||
|
|
@ -25,6 +25,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
|||
"blender",
|
||||
"photoshop",
|
||||
"tvpaint",
|
||||
"substancepainter",
|
||||
"aftereffects"
|
||||
]
|
||||
|
||||
|
|
|
|||
37
openpype/hooks/pre_host_set_ocio.py
Normal file
37
openpype/hooks/pre_host_set_ocio.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
|
||||
from openpype.pipeline.colorspace import get_imageio_config
|
||||
from openpype.pipeline.template_data import get_template_data
|
||||
|
||||
|
||||
class PreLaunchHostSetOCIO(PreLaunchHook):
|
||||
"""Set OCIO environment for the host"""
|
||||
|
||||
order = 0
|
||||
app_groups = ["substancepainter"]
|
||||
|
||||
def execute(self):
|
||||
"""Hook entry method."""
|
||||
|
||||
anatomy_data = get_template_data(
|
||||
project_doc=self.data["project_doc"],
|
||||
asset_doc=self.data["asset_doc"],
|
||||
task_name=self.data["task_name"],
|
||||
host_name=self.host_name,
|
||||
system_settings=self.data["system_settings"]
|
||||
)
|
||||
|
||||
ocio_config = get_imageio_config(
|
||||
project_name=self.data["project_doc"]["name"],
|
||||
host_name=self.host_name,
|
||||
project_settings=self.data["project_settings"],
|
||||
anatomy_data=anatomy_data,
|
||||
anatomy=self.data["anatomy"]
|
||||
)
|
||||
|
||||
if ocio_config:
|
||||
ocio_path = ocio_config["path"]
|
||||
self.log.info(f"Setting OCIO config path: {ocio_path}")
|
||||
self.launch_context.env["OCIO"] = ocio_path
|
||||
else:
|
||||
self.log.debug("OCIO not set or enabled")
|
||||
|
|
@ -26,12 +26,9 @@ class RenderCreator(Creator):
|
|||
|
||||
create_allow_context_change = True
|
||||
|
||||
def __init__(self, project_settings, *args, **kwargs):
|
||||
super(RenderCreator, self).__init__(project_settings, *args, **kwargs)
|
||||
self._default_variants = (project_settings["aftereffects"]
|
||||
["create"]
|
||||
["RenderCreator"]
|
||||
["defaults"])
|
||||
# Settings
|
||||
default_variants = []
|
||||
mark_for_review = True
|
||||
|
||||
def create(self, subset_name_from_ui, data, pre_create_data):
|
||||
stub = api.get_stub() # only after After Effects is up
|
||||
|
|
@ -82,28 +79,40 @@ class RenderCreator(Creator):
|
|||
use_farm = pre_create_data["farm"]
|
||||
new_instance.creator_attributes["farm"] = use_farm
|
||||
|
||||
review = pre_create_data["mark_for_review"]
|
||||
new_instance.creator_attributes["mark_for_review"] = review
|
||||
|
||||
api.get_stub().imprint(new_instance.id,
|
||||
new_instance.data_to_store())
|
||||
self._add_instance_to_context(new_instance)
|
||||
|
||||
stub.rename_item(comp.id, subset_name)
|
||||
|
||||
def get_default_variants(self):
|
||||
return self._default_variants
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [BoolDef("farm", label="Render on farm")]
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
output = [
|
||||
BoolDef("use_selection", default=True, label="Use selection"),
|
||||
BoolDef("use_composition_name",
|
||||
label="Use composition name in subset"),
|
||||
UISeparatorDef(),
|
||||
BoolDef("farm", label="Render on farm")
|
||||
BoolDef("farm", label="Render on farm"),
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review",
|
||||
default=self.mark_for_review
|
||||
)
|
||||
]
|
||||
return output
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef("farm", label="Render on farm"),
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review",
|
||||
default=False
|
||||
)
|
||||
]
|
||||
|
||||
def get_icon(self):
|
||||
return resources.get_openpype_splash_filepath()
|
||||
|
||||
|
|
@ -143,6 +152,13 @@ class RenderCreator(Creator):
|
|||
api.get_stub().rename_item(comp_id,
|
||||
new_comp_name)
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
plugin_settings = (
|
||||
project_settings["aftereffects"]["create"]["RenderCreator"]
|
||||
)
|
||||
|
||||
self.mark_for_review = plugin_settings["mark_for_review"]
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Creator for Render instances
|
||||
|
||||
|
|
@ -201,4 +217,7 @@ class RenderCreator(Creator):
|
|||
instance_data["creator_attributes"] = {"farm": is_old_farm}
|
||||
instance_data["family"] = self.family
|
||||
|
||||
if instance_data["creator_attributes"].get("mark_for_review") is None:
|
||||
instance_data["creator_attributes"]["mark_for_review"] = True
|
||||
|
||||
return instance_data
|
||||
|
|
|
|||
|
|
@ -88,10 +88,11 @@ class CollectAERender(publish.AbstractCollectRender):
|
|||
raise ValueError("No file extension set in Render Queue")
|
||||
render_item = render_q[0]
|
||||
|
||||
instance_families = inst.data.get("families", [])
|
||||
subset_name = inst.data["subset"]
|
||||
instance = AERenderInstance(
|
||||
family="render",
|
||||
families=inst.data.get("families", []),
|
||||
families=instance_families,
|
||||
version=version,
|
||||
time="",
|
||||
source=current_file,
|
||||
|
|
@ -109,6 +110,7 @@ class CollectAERender(publish.AbstractCollectRender):
|
|||
tileRendering=False,
|
||||
tilesX=0,
|
||||
tilesY=0,
|
||||
review="review" in instance_families,
|
||||
frameStart=frame_start,
|
||||
frameEnd=frame_end,
|
||||
frameStep=1,
|
||||
|
|
@ -139,6 +141,9 @@ class CollectAERender(publish.AbstractCollectRender):
|
|||
instance.toBeRenderedOn = "deadline"
|
||||
instance.renderer = "aerender"
|
||||
instance.farm = True # to skip integrate
|
||||
if "review" in instance.families:
|
||||
# to skip ExtractReview locally
|
||||
instance.families.remove("review")
|
||||
|
||||
instances.append(instance)
|
||||
instances_to_remove.append(inst)
|
||||
|
|
@ -218,15 +223,4 @@ class CollectAERender(publish.AbstractCollectRender):
|
|||
if fam not in instance.families:
|
||||
instance.families.append(fam)
|
||||
|
||||
settings = get_project_settings(os.getenv("AVALON_PROJECT"))
|
||||
reviewable_subset_filter = (settings["deadline"]
|
||||
["publish"]
|
||||
["ProcessSubmittedJobOnFarm"]
|
||||
["aov_filter"].get(self.hosts[0]))
|
||||
for aov_pattern in reviewable_subset_filter:
|
||||
if re.match(aov_pattern, instance.subset):
|
||||
instance.families.append("review")
|
||||
instance.review = True
|
||||
break
|
||||
|
||||
return instance
|
||||
|
|
|
|||
|
|
@ -0,0 +1,25 @@
|
|||
"""
|
||||
Requires:
|
||||
None
|
||||
|
||||
Provides:
|
||||
instance -> family ("review")
|
||||
"""
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectReview(pyblish.api.ContextPlugin):
|
||||
"""Add review to families if instance created with 'mark_for_review' flag
|
||||
"""
|
||||
label = "Collect Review"
|
||||
hosts = ["aftereffects"]
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
|
||||
def process(self, context):
|
||||
for instance in context:
|
||||
creator_attributes = instance.data.get("creator_attributes") or {}
|
||||
if (
|
||||
creator_attributes.get("mark_for_review")
|
||||
and "review" not in instance.data["families"]
|
||||
):
|
||||
instance.data["families"].append("review")
|
||||
|
|
@ -66,33 +66,9 @@ class ExtractLocalRender(publish.Extractor):
|
|||
first_repre = not representations
|
||||
if instance.data["review"] and first_repre:
|
||||
repre_data["tags"] = ["review"]
|
||||
thumbnail_path = os.path.join(staging_dir, files[0])
|
||||
instance.data["thumbnailSource"] = thumbnail_path
|
||||
|
||||
representations.append(repre_data)
|
||||
|
||||
instance.data["representations"] = representations
|
||||
|
||||
ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
|
||||
# Generate thumbnail.
|
||||
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
|
||||
|
||||
args = [
|
||||
ffmpeg_path, "-y",
|
||||
"-i", first_file_path,
|
||||
"-vf", "scale=300:-1",
|
||||
"-vframes", "1",
|
||||
thumbnail_path
|
||||
]
|
||||
self.log.debug("Thumbnail args:: {}".format(args))
|
||||
try:
|
||||
output = run_subprocess(args)
|
||||
except TypeError:
|
||||
self.log.warning("Error in creating thumbnail")
|
||||
six.reraise(*sys.exc_info())
|
||||
|
||||
instance.data["representations"].append({
|
||||
"name": "thumbnail",
|
||||
"ext": "jpg",
|
||||
"files": os.path.basename(thumbnail_path),
|
||||
"stagingDir": staging_dir,
|
||||
"tags": ["thumbnail"]
|
||||
})
|
||||
|
|
|
|||
46
openpype/hosts/houdini/api/action.py
Normal file
46
openpype/hosts/houdini/api/action.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import pyblish.api
|
||||
import hou
|
||||
|
||||
from openpype.pipeline.publish import get_errored_instances_from_context
|
||||
|
||||
|
||||
class SelectInvalidAction(pyblish.api.Action):
|
||||
"""Select invalid nodes in Maya when plug-in failed.
|
||||
|
||||
To retrieve the invalid nodes this assumes a static `get_invalid()`
|
||||
method is available on the plugin.
|
||||
|
||||
"""
|
||||
label = "Select invalid"
|
||||
on = "failed" # This action is only available on a failed plug-in
|
||||
icon = "search" # Icon from Awesome Icon
|
||||
|
||||
def process(self, context, plugin):
|
||||
|
||||
errored_instances = get_errored_instances_from_context(context)
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
|
||||
# Get the invalid nodes for the plug-ins
|
||||
self.log.info("Finding invalid nodes..")
|
||||
invalid = list()
|
||||
for instance in instances:
|
||||
invalid_nodes = plugin.get_invalid(instance)
|
||||
if invalid_nodes:
|
||||
if isinstance(invalid_nodes, (list, tuple)):
|
||||
invalid.extend(invalid_nodes)
|
||||
else:
|
||||
self.log.warning("Plug-in returned to be invalid, "
|
||||
"but has no selectable nodes.")
|
||||
|
||||
hou.clearAllSelected()
|
||||
if invalid:
|
||||
self.log.info("Selecting invalid nodes: {}".format(
|
||||
", ".join(node.path() for node in invalid)
|
||||
))
|
||||
for node in invalid:
|
||||
node.setSelected(True)
|
||||
node.setCurrent(True)
|
||||
else:
|
||||
self.log.info("No invalid nodes found.")
|
||||
|
|
@ -12,26 +12,43 @@ import tempfile
|
|||
import logging
|
||||
import os
|
||||
|
||||
from openpype.client import get_asset_by_name
|
||||
from openpype.pipeline import registered_host
|
||||
from openpype.pipeline.create import CreateContext
|
||||
from openpype.resources import get_openpype_icon_filepath
|
||||
|
||||
import hou
|
||||
import stateutils
|
||||
import soptoolutils
|
||||
import loptoolutils
|
||||
import cop2toolutils
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
CATEGORY_GENERIC_TOOL = {
|
||||
hou.sopNodeTypeCategory(): soptoolutils.genericTool,
|
||||
hou.cop2NodeTypeCategory(): cop2toolutils.genericTool,
|
||||
hou.lopNodeTypeCategory(): loptoolutils.genericTool
|
||||
}
|
||||
|
||||
|
||||
CREATE_SCRIPT = """
|
||||
from openpype.hosts.houdini.api.creator_node_shelves import create_interactive
|
||||
create_interactive("{identifier}")
|
||||
create_interactive("{identifier}", **kwargs)
|
||||
"""
|
||||
|
||||
|
||||
def create_interactive(creator_identifier):
|
||||
def create_interactive(creator_identifier, **kwargs):
|
||||
"""Create a Creator using its identifier interactively.
|
||||
|
||||
This is used by the generated shelf tools as callback when a user selects
|
||||
the creator from the node tab search menu.
|
||||
|
||||
The `kwargs` should be what Houdini passes to the tool create scripts
|
||||
context. For more information see:
|
||||
https://www.sidefx.com/docs/houdini/hom/tool_script.html#arguments
|
||||
|
||||
Args:
|
||||
creator_identifier (str): The creator identifier of the Creator plugin
|
||||
to create.
|
||||
|
|
@ -58,6 +75,33 @@ def create_interactive(creator_identifier):
|
|||
|
||||
host = registered_host()
|
||||
context = CreateContext(host)
|
||||
creator = context.manual_creators.get(creator_identifier)
|
||||
if not creator:
|
||||
raise RuntimeError("Invalid creator identifier: "
|
||||
"{}".format(creator_identifier))
|
||||
|
||||
# TODO: Once more elaborate unique create behavior should exist per Creator
|
||||
# instead of per network editor area then we should move this from here
|
||||
# to a method on the Creators for which this could be the default
|
||||
# implementation.
|
||||
pane = stateutils.activePane(kwargs)
|
||||
if isinstance(pane, hou.NetworkEditor):
|
||||
pwd = pane.pwd()
|
||||
subset_name = creator.get_subset_name(
|
||||
variant=variant,
|
||||
task_name=context.get_current_task_name(),
|
||||
asset_doc=get_asset_by_name(
|
||||
project_name=context.get_current_project_name(),
|
||||
asset_name=context.get_current_asset_name()
|
||||
),
|
||||
project_name=context.get_current_project_name(),
|
||||
host_name=context.host_name
|
||||
)
|
||||
|
||||
tool_fn = CATEGORY_GENERIC_TOOL.get(pwd.childTypeCategory())
|
||||
if tool_fn is not None:
|
||||
out_null = tool_fn(kwargs, "null")
|
||||
out_null.setName("OUT_{}".format(subset_name), unique_name=True)
|
||||
|
||||
before = context.instances_by_id.copy()
|
||||
|
||||
|
|
@ -135,12 +179,20 @@ def install():
|
|||
|
||||
log.debug("Writing OpenPype Creator nodes to shelf: {}".format(filepath))
|
||||
tools = []
|
||||
|
||||
with shelves_change_block():
|
||||
for identifier, creator in create_context.manual_creators.items():
|
||||
|
||||
# TODO: Allow the creator plug-in itself to override the categories
|
||||
# for where they are shown, by e.g. defining
|
||||
# `Creator.get_network_categories()`
|
||||
# Allow the creator plug-in itself to override the categories
|
||||
# for where they are shown with `Creator.get_network_categories()`
|
||||
if not hasattr(creator, "get_network_categories"):
|
||||
log.debug("Creator {} has no `get_network_categories` method "
|
||||
"and will not be added to TAB search.")
|
||||
continue
|
||||
|
||||
network_categories = creator.get_network_categories()
|
||||
if not network_categories:
|
||||
continue
|
||||
|
||||
key = "openpype_create.{}".format(identifier)
|
||||
log.debug(f"Registering {key}")
|
||||
|
|
@ -153,17 +205,13 @@ def install():
|
|||
creator.label
|
||||
),
|
||||
"help_url": None,
|
||||
"network_categories": [
|
||||
hou.ropNodeTypeCategory(),
|
||||
hou.sopNodeTypeCategory()
|
||||
],
|
||||
"network_categories": network_categories,
|
||||
"viewer_categories": [],
|
||||
"cop_viewer_categories": [],
|
||||
"network_op_type": None,
|
||||
"viewer_op_type": None,
|
||||
"locations": ["OpenPype"]
|
||||
}
|
||||
|
||||
label = "Create {}".format(creator.label)
|
||||
tool = hou.shelves.tool(key)
|
||||
if tool:
|
||||
|
|
|
|||
|
|
@ -276,3 +276,19 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
|
|||
color = hou.Color((0.616, 0.871, 0.769))
|
||||
node.setUserData('nodeshape', shape)
|
||||
node.setColor(color)
|
||||
|
||||
def get_network_categories(self):
|
||||
"""Return in which network view type this creator should show.
|
||||
|
||||
The node type categories returned here will be used to define where
|
||||
the creator will show up in the TAB search for nodes in Houdini's
|
||||
Network View.
|
||||
|
||||
This can be overridden in inherited classes to define where that
|
||||
particular Creator should be visible in the TAB search.
|
||||
|
||||
Returns:
|
||||
list: List of houdini node type categories
|
||||
|
||||
"""
|
||||
return [hou.ropNodeTypeCategory()]
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
from openpype.hosts.houdini.api import plugin
|
||||
from openpype.pipeline import CreatedInstance, CreatorError
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class CreateAlembicCamera(plugin.HoudiniCreator):
|
||||
"""Single baked camera from Alembic ROP."""
|
||||
|
|
@ -47,3 +49,9 @@ class CreateAlembicCamera(plugin.HoudiniCreator):
|
|||
self.lock_parameters(instance_node, to_lock)
|
||||
|
||||
instance_node.parm("trange").set(1)
|
||||
|
||||
def get_network_categories(self):
|
||||
return [
|
||||
hou.ropNodeTypeCategory(),
|
||||
hou.objNodeTypeCategory()
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Creator plugin for creating composite sequences."""
|
||||
from openpype.hosts.houdini.api import plugin
|
||||
from openpype.pipeline import CreatedInstance
|
||||
from openpype.pipeline import CreatedInstance, CreatorError
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class CreateCompositeSequence(plugin.HoudiniCreator):
|
||||
|
|
@ -35,8 +37,20 @@ class CreateCompositeSequence(plugin.HoudiniCreator):
|
|||
"copoutput": filepath
|
||||
}
|
||||
|
||||
if self.selected_nodes:
|
||||
if len(self.selected_nodes) > 1:
|
||||
raise CreatorError("More than one item selected.")
|
||||
path = self.selected_nodes[0].path()
|
||||
parms["coppath"] = path
|
||||
|
||||
instance_node.setParms(parms)
|
||||
|
||||
# Lock any parameters in this list
|
||||
to_lock = ["prim_to_detail_pattern"]
|
||||
self.lock_parameters(instance_node, to_lock)
|
||||
|
||||
def get_network_categories(self):
|
||||
return [
|
||||
hou.ropNodeTypeCategory(),
|
||||
hou.cop2NodeTypeCategory()
|
||||
]
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
from openpype.hosts.houdini.api import plugin
|
||||
from openpype.pipeline import CreatedInstance
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class CreatePointCache(plugin.HoudiniCreator):
|
||||
"""Alembic ROP to pointcache"""
|
||||
|
|
@ -49,3 +51,9 @@ class CreatePointCache(plugin.HoudiniCreator):
|
|||
# Lock any parameters in this list
|
||||
to_lock = ["prim_to_detail_pattern"]
|
||||
self.lock_parameters(instance_node, to_lock)
|
||||
|
||||
def get_network_categories(self):
|
||||
return [
|
||||
hou.ropNodeTypeCategory(),
|
||||
hou.sopNodeTypeCategory()
|
||||
]
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
from openpype.hosts.houdini.api import plugin
|
||||
from openpype.pipeline import CreatedInstance
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class CreateUSD(plugin.HoudiniCreator):
|
||||
"""Universal Scene Description"""
|
||||
|
|
@ -13,7 +15,6 @@ class CreateUSD(plugin.HoudiniCreator):
|
|||
enabled = False
|
||||
|
||||
def create(self, subset_name, instance_data, pre_create_data):
|
||||
import hou # noqa
|
||||
|
||||
instance_data.pop("active", None)
|
||||
instance_data.update({"node_type": "usd"})
|
||||
|
|
@ -43,3 +44,9 @@ class CreateUSD(plugin.HoudiniCreator):
|
|||
"id",
|
||||
]
|
||||
self.lock_parameters(instance_node, to_lock)
|
||||
|
||||
def get_network_categories(self):
|
||||
return [
|
||||
hou.ropNodeTypeCategory(),
|
||||
hou.lopNodeTypeCategory()
|
||||
]
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
from openpype.hosts.houdini.api import plugin
|
||||
from openpype.pipeline import CreatedInstance
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class CreateVDBCache(plugin.HoudiniCreator):
|
||||
"""OpenVDB from Geometry ROP"""
|
||||
|
|
@ -34,3 +36,9 @@ class CreateVDBCache(plugin.HoudiniCreator):
|
|||
parms["soppath"] = self.selected_nodes[0].path()
|
||||
|
||||
instance_node.setParms(parms)
|
||||
|
||||
def get_network_categories(self):
|
||||
return [
|
||||
hou.ropNodeTypeCategory(),
|
||||
hou.sopNodeTypeCategory()
|
||||
]
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator):
|
|||
identifier = "io.openpype.creators.houdini.workfile"
|
||||
label = "Workfile"
|
||||
family = "workfile"
|
||||
icon = "document"
|
||||
icon = "fa5.file"
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
|
|
|
|||
|
|
@ -19,6 +19,9 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
|
|||
instance.data["handleEnd"] = 0
|
||||
instance.data["fps"] = instance.context.data["fps"]
|
||||
|
||||
# Enable ftrack functionality
|
||||
instance.data.setdefault("families", []).append('ftrack')
|
||||
|
||||
# Get the camera from the rop node to collect the focal length
|
||||
ropnode_path = instance.data["instance_node"]
|
||||
ropnode = hou.node(ropnode_path)
|
||||
|
|
@ -26,8 +29,9 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
|
|||
camera_path = ropnode.parm("camera").eval()
|
||||
camera_node = hou.node(camera_path)
|
||||
if not camera_node:
|
||||
raise RuntimeError("No valid camera node found on review node: "
|
||||
"{}".format(camera_path))
|
||||
self.log.warning("No valid camera node found on review node: "
|
||||
"{}".format(camera_path))
|
||||
return
|
||||
|
||||
# Collect focal length.
|
||||
focal_length_parm = camera_node.parm("focal")
|
||||
|
|
@ -49,5 +53,3 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
|
|||
# Store focal length in `burninDataMembers`
|
||||
burnin_members = instance.data.setdefault("burninDataMembers", {})
|
||||
burnin_members["focalLength"] = focal_length
|
||||
|
||||
instance.data.setdefault("families", []).append('ftrack')
|
||||
|
|
|
|||
|
|
@ -2,27 +2,20 @@ import os
|
|||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import (
|
||||
publish,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from openpype.pipeline import publish
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class ExtractOpenGL(publish.Extractor,
|
||||
OptionalPyblishPluginMixin):
|
||||
class ExtractOpenGL(publish.Extractor):
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.01
|
||||
label = "Extract OpenGL"
|
||||
families = ["review"]
|
||||
hosts = ["houdini"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
ropnode = hou.node(instance.data.get("instance_node"))
|
||||
|
||||
output = ropnode.evalParm("picture")
|
||||
|
|
|
|||
|
|
@ -1,21 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Scene setting</title>
|
||||
<description>
|
||||
## Invalid input node
|
||||
|
||||
VDB input must have the same number of VDBs, points, primitives and vertices as output.
|
||||
|
||||
</description>
|
||||
<detail>
|
||||
### __Detailed Info__ (optional)
|
||||
|
||||
A VDB is an inherited type of Prim, holds the following data:
|
||||
- Primitives: 1
|
||||
- Points: 1
|
||||
- Vertices: 1
|
||||
- VDBs: 1
|
||||
</detail>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Invalid VDB</title>
|
||||
<description>
|
||||
## Invalid VDB output
|
||||
|
||||
All primitives of the output geometry must be VDBs, no other primitive
|
||||
types are allowed. That means that regardless of the amount of VDBs in the
|
||||
geometry it will have an equal amount of VDBs, points, primitives and
|
||||
vertices since each VDB primitive is one point, one vertex and one VDB.
|
||||
|
||||
This validation only checks the geometry on the first frame of the export
|
||||
frame range.
|
||||
|
||||
|
||||
|
||||
</description>
|
||||
<detail>
|
||||
### Detailed Info
|
||||
|
||||
ROP node `{rop_path}` is set to export SOP path `{sop_path}`.
|
||||
|
||||
{message}
|
||||
|
||||
</detail>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -16,15 +16,19 @@ class ValidateSceneReview(pyblish.api.InstancePlugin):
|
|||
label = "Scene Setting for review"
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid_scene_path(instance)
|
||||
|
||||
report = []
|
||||
if invalid:
|
||||
report.append(
|
||||
"Scene path does not exist: '%s'" % invalid[0],
|
||||
)
|
||||
instance_node = hou.node(instance.data.get("instance_node"))
|
||||
|
||||
invalid = self.get_invalid_resolution(instance)
|
||||
invalid = self.get_invalid_scene_path(instance_node)
|
||||
if invalid:
|
||||
report.append(invalid)
|
||||
|
||||
invalid = self.get_invalid_camera_path(instance_node)
|
||||
if invalid:
|
||||
report.append(invalid)
|
||||
|
||||
invalid = self.get_invalid_resolution(instance_node)
|
||||
if invalid:
|
||||
report.extend(invalid)
|
||||
|
||||
|
|
@ -33,26 +37,36 @@ class ValidateSceneReview(pyblish.api.InstancePlugin):
|
|||
"\n\n".join(report),
|
||||
title=self.label)
|
||||
|
||||
def get_invalid_scene_path(self, instance):
|
||||
|
||||
node = hou.node(instance.data.get("instance_node"))
|
||||
scene_path_parm = node.parm("scenepath")
|
||||
def get_invalid_scene_path(self, rop_node):
|
||||
scene_path_parm = rop_node.parm("scenepath")
|
||||
scene_path_node = scene_path_parm.evalAsNode()
|
||||
if not scene_path_node:
|
||||
return [scene_path_parm.evalAsString()]
|
||||
path = scene_path_parm.evalAsString()
|
||||
return "Scene path does not exist: '{}'".format(path)
|
||||
|
||||
def get_invalid_resolution(self, instance):
|
||||
node = hou.node(instance.data.get("instance_node"))
|
||||
def get_invalid_camera_path(self, rop_node):
|
||||
camera_path_parm = rop_node.parm("camera")
|
||||
camera_node = camera_path_parm.evalAsNode()
|
||||
path = camera_path_parm.evalAsString()
|
||||
if not camera_node:
|
||||
return "Camera path does not exist: '{}'".format(path)
|
||||
type_name = camera_node.type().name()
|
||||
if type_name != "cam":
|
||||
return "Camera path is not a camera: '{}' (type: {})".format(
|
||||
path, type_name
|
||||
)
|
||||
|
||||
def get_invalid_resolution(self, rop_node):
|
||||
|
||||
# The resolution setting is only used when Override Camera Resolution
|
||||
# is enabled. So we skip validation if it is disabled.
|
||||
override = node.parm("tres").eval()
|
||||
override = rop_node.parm("tres").eval()
|
||||
if not override:
|
||||
return
|
||||
|
||||
invalid = []
|
||||
res_width = node.parm("res1").eval()
|
||||
res_height = node.parm("res2").eval()
|
||||
res_width = rop_node.parm("res1").eval()
|
||||
res_height = rop_node.parm("res2").eval()
|
||||
if res_width == 0:
|
||||
invalid.append("Override Resolution width is set to zero.")
|
||||
if res_height == 0:
|
||||
|
|
|
|||
|
|
@ -1,52 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import pyblish.api
|
||||
from openpype.pipeline import (
|
||||
PublishValidationError
|
||||
)
|
||||
|
||||
|
||||
class ValidateVDBInputNode(pyblish.api.InstancePlugin):
|
||||
"""Validate that the node connected to the output node is of type VDB.
|
||||
|
||||
Regardless of the amount of VDBs create the output will need to have an
|
||||
equal amount of VDBs, points, primitives and vertices
|
||||
|
||||
A VDB is an inherited type of Prim, holds the following data:
|
||||
- Primitives: 1
|
||||
- Points: 1
|
||||
- Vertices: 1
|
||||
- VDBs: 1
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder + 0.1
|
||||
families = ["vdbcache"]
|
||||
hosts = ["houdini"]
|
||||
label = "Validate Input Node (VDB)"
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise PublishValidationError(
|
||||
self,
|
||||
"Node connected to the output node is not of type VDB",
|
||||
title=self.label
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
node = instance.data["output_node"]
|
||||
|
||||
prims = node.geometry().prims()
|
||||
nr_of_prims = len(prims)
|
||||
|
||||
nr_of_points = len(node.geometry().points())
|
||||
if nr_of_points != nr_of_prims:
|
||||
cls.log.error("The number of primitives and points do not match")
|
||||
return [instance]
|
||||
|
||||
for prim in prims:
|
||||
if prim.numVertices() != 1:
|
||||
cls.log.error("Found primitive with more than 1 vertex!")
|
||||
return [instance]
|
||||
|
|
@ -1,14 +1,73 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import contextlib
|
||||
|
||||
import pyblish.api
|
||||
import hou
|
||||
from openpype.pipeline import PublishValidationError
|
||||
|
||||
from openpype.pipeline import PublishXmlValidationError
|
||||
from openpype.hosts.houdini.api.action import SelectInvalidAction
|
||||
|
||||
|
||||
def group_consecutive_numbers(nums):
|
||||
"""
|
||||
Args:
|
||||
nums (list): List of sorted integer numbers.
|
||||
|
||||
Yields:
|
||||
str: Group ranges as {start}-{end} if more than one number in the range
|
||||
else it yields {end}
|
||||
|
||||
"""
|
||||
start = None
|
||||
end = None
|
||||
|
||||
def _result(a, b):
|
||||
if a == b:
|
||||
return "{}".format(a)
|
||||
else:
|
||||
return "{}-{}".format(a, b)
|
||||
|
||||
for num in nums:
|
||||
if start is None:
|
||||
start = num
|
||||
end = num
|
||||
elif num == end + 1:
|
||||
end = num
|
||||
else:
|
||||
yield _result(start, end)
|
||||
start = num
|
||||
end = num
|
||||
if start is not None:
|
||||
yield _result(start, end)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def update_mode_context(mode):
|
||||
original = hou.updateModeSetting()
|
||||
try:
|
||||
hou.setUpdateMode(mode)
|
||||
yield
|
||||
finally:
|
||||
hou.setUpdateMode(original)
|
||||
|
||||
|
||||
def get_geometry_at_frame(sop_node, frame, force=True):
|
||||
"""Return geometry at frame but force a cooked value."""
|
||||
with update_mode_context(hou.updateMode.AutoUpdate):
|
||||
sop_node.cook(force=force, frame_range=(frame, frame))
|
||||
return sop_node.geometryAtFrame(frame)
|
||||
|
||||
|
||||
class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
|
||||
"""Validate that the node connected to the output node is of type VDB.
|
||||
|
||||
Regardless of the amount of VDBs create the output will need to have an
|
||||
equal amount of VDBs, points, primitives and vertices
|
||||
All primitives of the output geometry must be VDBs, no other primitive
|
||||
types are allowed. That means that regardless of the amount of VDBs in the
|
||||
geometry it will have an equal amount of VDBs, points, primitives and
|
||||
vertices since each VDB primitive is one point, one vertex and one VDB.
|
||||
|
||||
This validation only checks the geometry on the first frame of the export
|
||||
frame range for optimization purposes.
|
||||
|
||||
A VDB is an inherited type of Prim, holds the following data:
|
||||
- Primitives: 1
|
||||
|
|
@ -22,54 +81,95 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
|
|||
families = ["vdbcache"]
|
||||
hosts = ["houdini"]
|
||||
label = "Validate Output Node (VDB)"
|
||||
actions = [SelectInvalidAction]
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise PublishValidationError(
|
||||
"Node connected to the output node is not" " of type VDB!",
|
||||
title=self.label
|
||||
invalid_nodes, message = self.get_invalid_with_message(instance)
|
||||
if invalid_nodes:
|
||||
|
||||
# instance_node is str, but output_node is hou.Node so we convert
|
||||
output = instance.data.get("output_node")
|
||||
output_path = output.path() if output else None
|
||||
|
||||
raise PublishXmlValidationError(
|
||||
self,
|
||||
"Invalid VDB content: {}".format(message),
|
||||
formatting_data={
|
||||
"message": message,
|
||||
"rop_path": instance.data.get("instance_node"),
|
||||
"sop_path": output_path
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
def get_invalid_with_message(cls, instance):
|
||||
|
||||
node = instance.data["output_node"]
|
||||
node = instance.data.get("output_node")
|
||||
if node is None:
|
||||
cls.log.error(
|
||||
instance_node = instance.data.get("instance_node")
|
||||
error = (
|
||||
"SOP path is not correctly set on "
|
||||
"ROP node '%s'." % instance.data.get("instance_node")
|
||||
"ROP node `{}`.".format(instance_node)
|
||||
)
|
||||
return [instance]
|
||||
return [hou.node(instance_node), error]
|
||||
|
||||
frame = instance.data.get("frameStart", 0)
|
||||
geometry = node.geometryAtFrame(frame)
|
||||
geometry = get_geometry_at_frame(node, frame)
|
||||
if geometry is None:
|
||||
# No geometry data on this node, maybe the node hasn't cooked?
|
||||
cls.log.error(
|
||||
"SOP node has no geometry data. "
|
||||
"Is it cooked? %s" % node.path()
|
||||
error = (
|
||||
"SOP node `{}` has no geometry data. "
|
||||
"Was it unable to cook?".format(node.path())
|
||||
)
|
||||
return [node]
|
||||
return [node, error]
|
||||
|
||||
prims = geometry.prims()
|
||||
nr_of_prims = len(prims)
|
||||
num_prims = geometry.intrinsicValue("primitivecount")
|
||||
num_points = geometry.intrinsicValue("pointcount")
|
||||
if num_prims == 0 and num_points == 0:
|
||||
# Since we are only checking the first frame it doesn't mean there
|
||||
# won't be VDB prims in a few frames. As such we'll assume for now
|
||||
# the user knows what he or she is doing
|
||||
cls.log.warning(
|
||||
"SOP node `{}` has no primitives on start frame {}. "
|
||||
"Validation is skipped and it is assumed elsewhere in the "
|
||||
"frame range VDB prims and only VDB prims will exist."
|
||||
"".format(node.path(), int(frame))
|
||||
)
|
||||
return [None, None]
|
||||
|
||||
# All primitives must be hou.VDB
|
||||
invalid_prim = False
|
||||
for prim in prims:
|
||||
if not isinstance(prim, hou.VDB):
|
||||
cls.log.error("Found non-VDB primitive: %s" % prim)
|
||||
invalid_prim = True
|
||||
if invalid_prim:
|
||||
return [instance]
|
||||
num_vdb_prims = geometry.countPrimType(hou.primType.VDB)
|
||||
cls.log.debug("Detected {} VDB primitives".format(num_vdb_prims))
|
||||
if num_prims != num_vdb_prims:
|
||||
# There's at least one primitive that is not a VDB.
|
||||
# Search them and report them to the artist.
|
||||
prims = geometry.prims()
|
||||
invalid_prims = [prim for prim in prims
|
||||
if not isinstance(prim, hou.VDB)]
|
||||
if invalid_prims:
|
||||
# Log prim numbers as consecutive ranges so logging isn't very
|
||||
# slow for large number of primitives
|
||||
error = (
|
||||
"Found non-VDB primitives for `{}`. "
|
||||
"Primitive indices {} are not VDB primitives.".format(
|
||||
node.path(),
|
||||
", ".join(group_consecutive_numbers(
|
||||
prim.number() for prim in invalid_prims
|
||||
))
|
||||
)
|
||||
)
|
||||
return [node, error]
|
||||
|
||||
nr_of_points = len(geometry.points())
|
||||
if nr_of_points != nr_of_prims:
|
||||
cls.log.error("The number of primitives and points do not match")
|
||||
return [instance]
|
||||
if num_points != num_vdb_prims:
|
||||
# We have points unrelated to the VDB primitives.
|
||||
error = (
|
||||
"The number of primitives and points do not match in '{}'. "
|
||||
"This likely means you have unconnected points, which we do "
|
||||
"not allow in the VDB output.".format(node.path()))
|
||||
return [node, error]
|
||||
|
||||
for prim in prims:
|
||||
if prim.numVertices() != 1:
|
||||
cls.log.error("Found primitive with more than 1 vertex!")
|
||||
return [instance]
|
||||
return [None, None]
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
nodes, _ = cls.get_invalid_with_message(instance)
|
||||
return nodes
|
||||
|
|
|
|||
28
openpype/hosts/max/plugins/create/create_model.py
Normal file
28
openpype/hosts/max/plugins/create/create_model.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Creator plugin for model."""
|
||||
from openpype.hosts.max.api import plugin
|
||||
from openpype.pipeline import CreatedInstance
|
||||
|
||||
|
||||
class CreateModel(plugin.MaxCreator):
|
||||
identifier = "io.openpype.creators.max.model"
|
||||
label = "Model"
|
||||
family = "model"
|
||||
icon = "gear"
|
||||
|
||||
def create(self, subset_name, instance_data, pre_create_data):
|
||||
from pymxs import runtime as rt
|
||||
instance = super(CreateModel, self).create(
|
||||
subset_name,
|
||||
instance_data,
|
||||
pre_create_data) # type: CreatedInstance
|
||||
container = rt.getNodeByName(instance.data.get("instance_node"))
|
||||
# TODO: Disable "Add to Containers?" Panel
|
||||
# parent the selected cameras into the container
|
||||
sel_obj = None
|
||||
if self.selected_nodes:
|
||||
sel_obj = list(self.selected_nodes)
|
||||
for obj in sel_obj:
|
||||
obj.parent = container
|
||||
# for additional work on the node:
|
||||
# instance_node = rt.getNodeByName(instance.get("instance_node"))
|
||||
|
|
@ -10,7 +10,9 @@ class MaxSceneLoader(load.LoaderPlugin):
|
|||
"""Max Scene Loader"""
|
||||
|
||||
families = ["camera",
|
||||
"maxScene"]
|
||||
"maxScene",
|
||||
"model"]
|
||||
|
||||
representations = ["max"]
|
||||
order = -8
|
||||
icon = "code-fork"
|
||||
|
|
|
|||
109
openpype/hosts/max/plugins/load/load_model.py
Normal file
109
openpype/hosts/max/plugins/load/load_model.py
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
|
||||
import os
|
||||
from openpype.pipeline import (
|
||||
load, get_representation_path
|
||||
)
|
||||
from openpype.hosts.max.api.pipeline import containerise
|
||||
from openpype.hosts.max.api import lib
|
||||
from openpype.hosts.max.api.lib import maintained_selection
|
||||
|
||||
|
||||
class ModelAbcLoader(load.LoaderPlugin):
|
||||
"""Loading model with the Alembic loader."""
|
||||
|
||||
families = ["model"]
|
||||
label = "Load Model(Alembic)"
|
||||
representations = ["abc"]
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
file_path = os.path.normpath(self.fname)
|
||||
|
||||
abc_before = {
|
||||
c for c in rt.rootNode.Children
|
||||
if rt.classOf(c) == rt.AlembicContainer
|
||||
}
|
||||
|
||||
abc_import_cmd = (f"""
|
||||
AlembicImport.ImportToRoot = false
|
||||
AlembicImport.CustomAttributes = true
|
||||
AlembicImport.UVs = true
|
||||
AlembicImport.VertexColors = true
|
||||
|
||||
importFile @"{file_path}" #noPrompt
|
||||
""")
|
||||
|
||||
self.log.debug(f"Executing command: {abc_import_cmd}")
|
||||
rt.execute(abc_import_cmd)
|
||||
|
||||
abc_after = {
|
||||
c for c in rt.rootNode.Children
|
||||
if rt.classOf(c) == rt.AlembicContainer
|
||||
}
|
||||
|
||||
# This should yield new AlembicContainer node
|
||||
abc_containers = abc_after.difference(abc_before)
|
||||
|
||||
if len(abc_containers) != 1:
|
||||
self.log.error("Something failed when loading.")
|
||||
|
||||
abc_container = abc_containers.pop()
|
||||
|
||||
return containerise(
|
||||
name, [abc_container], context, loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
from pymxs import runtime as rt
|
||||
path = get_representation_path(representation)
|
||||
node = rt.getNodeByName(container["instance_node"])
|
||||
rt.select(node.Children)
|
||||
|
||||
for alembic in rt.selection:
|
||||
abc = rt.getNodeByName(alembic.name)
|
||||
rt.select(abc.Children)
|
||||
for abc_con in rt.selection:
|
||||
container = rt.getNodeByName(abc_con.name)
|
||||
container.source = path
|
||||
rt.select(container.Children)
|
||||
for abc_obj in rt.selection:
|
||||
alembic_obj = rt.getNodeByName(abc_obj.name)
|
||||
alembic_obj.source = path
|
||||
|
||||
with maintained_selection():
|
||||
rt.select(node)
|
||||
|
||||
lib.imprint(container["instance_node"], {
|
||||
"representation": str(representation["_id"])
|
||||
})
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
node = rt.getNodeByName(container["instance_node"])
|
||||
rt.delete(node)
|
||||
|
||||
@staticmethod
|
||||
def get_container_children(parent, type_name):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
def list_children(node):
|
||||
children = []
|
||||
for c in node.Children:
|
||||
children.append(c)
|
||||
children += list_children(c)
|
||||
return children
|
||||
|
||||
filtered = []
|
||||
for child in list_children(parent):
|
||||
class_type = str(rt.classOf(child.baseObject))
|
||||
if class_type == type_name:
|
||||
filtered.append(child)
|
||||
|
||||
return filtered
|
||||
77
openpype/hosts/max/plugins/load/load_model_fbx.py
Normal file
77
openpype/hosts/max/plugins/load/load_model_fbx.py
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
import os
|
||||
from openpype.pipeline import (
|
||||
load,
|
||||
get_representation_path
|
||||
)
|
||||
from openpype.hosts.max.api.pipeline import containerise
|
||||
from openpype.hosts.max.api import lib
|
||||
from openpype.hosts.max.api.lib import maintained_selection
|
||||
|
||||
|
||||
class FbxModelLoader(load.LoaderPlugin):
|
||||
"""Fbx Model Loader"""
|
||||
|
||||
families = ["model"]
|
||||
representations = ["fbx"]
|
||||
order = -9
|
||||
icon = "code-fork"
|
||||
color = "white"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
filepath = os.path.normpath(self.fname)
|
||||
|
||||
fbx_import_cmd = (
|
||||
f"""
|
||||
|
||||
FBXImporterSetParam "Animation" false
|
||||
FBXImporterSetParam "Cameras" false
|
||||
FBXImporterSetParam "AxisConversionMethod" true
|
||||
FbxExporterSetParam "UpAxis" "Y"
|
||||
FbxExporterSetParam "Preserveinstances" true
|
||||
|
||||
importFile @"{filepath}" #noPrompt using:FBXIMP
|
||||
""")
|
||||
|
||||
self.log.debug(f"Executing command: {fbx_import_cmd}")
|
||||
rt.execute(fbx_import_cmd)
|
||||
|
||||
asset = rt.getNodeByName(f"{name}")
|
||||
|
||||
return containerise(
|
||||
name, [asset], context, loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
path = get_representation_path(representation)
|
||||
node = rt.getNodeByName(container["instance_node"])
|
||||
rt.select(node.Children)
|
||||
fbx_reimport_cmd = (
|
||||
f"""
|
||||
FBXImporterSetParam "Animation" false
|
||||
FBXImporterSetParam "Cameras" false
|
||||
FBXImporterSetParam "AxisConversionMethod" true
|
||||
FbxExporterSetParam "UpAxis" "Y"
|
||||
FbxExporterSetParam "Preserveinstances" true
|
||||
|
||||
importFile @"{path}" #noPrompt using:FBXIMP
|
||||
""")
|
||||
rt.execute(fbx_reimport_cmd)
|
||||
|
||||
with maintained_selection():
|
||||
rt.select(node)
|
||||
|
||||
lib.imprint(container["instance_node"], {
|
||||
"representation": str(representation["_id"])
|
||||
})
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
node = rt.getNodeByName(container["instance_node"])
|
||||
rt.delete(node)
|
||||
68
openpype/hosts/max/plugins/load/load_model_obj.py
Normal file
68
openpype/hosts/max/plugins/load/load_model_obj.py
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
import os
|
||||
from openpype.pipeline import (
|
||||
load,
|
||||
get_representation_path
|
||||
)
|
||||
from openpype.hosts.max.api.pipeline import containerise
|
||||
from openpype.hosts.max.api import lib
|
||||
from openpype.hosts.max.api.lib import maintained_selection
|
||||
|
||||
|
||||
class ObjLoader(load.LoaderPlugin):
|
||||
"""Obj Loader"""
|
||||
|
||||
families = ["model"]
|
||||
representations = ["obj"]
|
||||
order = -9
|
||||
icon = "code-fork"
|
||||
color = "white"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
filepath = os.path.normpath(self.fname)
|
||||
self.log.debug(f"Executing command to import..")
|
||||
|
||||
rt.execute(f'importFile @"{filepath}" #noPrompt using:ObjImp')
|
||||
# create "missing" container for obj import
|
||||
container = rt.container()
|
||||
container.name = f"{name}"
|
||||
|
||||
# get current selection
|
||||
for selection in rt.getCurrentSelection():
|
||||
selection.Parent = container
|
||||
|
||||
asset = rt.getNodeByName(f"{name}")
|
||||
|
||||
return containerise(
|
||||
name, [asset], context, loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
path = get_representation_path(representation)
|
||||
node_name = container["instance_node"]
|
||||
node = rt.getNodeByName(node_name)
|
||||
|
||||
instance_name, _ = node_name.split("_")
|
||||
container = rt.getNodeByName(instance_name)
|
||||
for n in container.Children:
|
||||
rt.delete(n)
|
||||
|
||||
rt.execute(f'importFile @"{path}" #noPrompt using:ObjImp')
|
||||
# get current selection
|
||||
for selection in rt.getCurrentSelection():
|
||||
selection.Parent = container
|
||||
|
||||
with maintained_selection():
|
||||
rt.select(node)
|
||||
|
||||
lib.imprint(node_name, {
|
||||
"representation": str(representation["_id"])
|
||||
})
|
||||
|
||||
def remove(self, container):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
node = rt.getNodeByName(container["instance_node"])
|
||||
rt.delete(node)
|
||||
78
openpype/hosts/max/plugins/load/load_model_usd.py
Normal file
78
openpype/hosts/max/plugins/load/load_model_usd.py
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
import os
|
||||
from openpype.pipeline import (
|
||||
load, get_representation_path
|
||||
)
|
||||
from openpype.hosts.max.api.pipeline import containerise
|
||||
from openpype.hosts.max.api import lib
|
||||
from openpype.hosts.max.api.lib import maintained_selection
|
||||
|
||||
|
||||
class ModelUSDLoader(load.LoaderPlugin):
|
||||
"""Loading model with the USD loader."""
|
||||
|
||||
families = ["model"]
|
||||
label = "Load Model(USD)"
|
||||
representations = ["usda"]
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
from pymxs import runtime as rt
|
||||
# asset_filepath
|
||||
filepath = os.path.normpath(self.fname)
|
||||
import_options = rt.USDImporter.CreateOptions()
|
||||
base_filename = os.path.basename(filepath)
|
||||
filename, ext = os.path.splitext(base_filename)
|
||||
log_filepath = filepath.replace(ext, "txt")
|
||||
|
||||
rt.LogPath = log_filepath
|
||||
rt.LogLevel = rt.name('info')
|
||||
rt.USDImporter.importFile(filepath,
|
||||
importOptions=import_options)
|
||||
|
||||
asset = rt.getNodeByName(f"{name}")
|
||||
|
||||
return containerise(
|
||||
name, [asset], context, loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
path = get_representation_path(representation)
|
||||
node_name = container["instance_node"]
|
||||
node = rt.getNodeByName(node_name)
|
||||
for n in node.Children:
|
||||
for r in n.Children:
|
||||
rt.delete(r)
|
||||
rt.delete(n)
|
||||
instance_name, _ = node_name.split("_")
|
||||
|
||||
import_options = rt.USDImporter.CreateOptions()
|
||||
base_filename = os.path.basename(path)
|
||||
_, ext = os.path.splitext(base_filename)
|
||||
log_filepath = path.replace(ext, "txt")
|
||||
|
||||
rt.LogPath = log_filepath
|
||||
rt.LogLevel = rt.name('info')
|
||||
rt.USDImporter.importFile(path,
|
||||
importOptions=import_options)
|
||||
|
||||
asset = rt.getNodeByName(f"{instance_name}")
|
||||
asset.Parent = node
|
||||
|
||||
with maintained_selection():
|
||||
rt.select(node)
|
||||
|
||||
lib.imprint(node_name, {
|
||||
"representation": str(representation["_id"])
|
||||
})
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
from pymxs import runtime as rt
|
||||
|
||||
node = rt.getNodeByName(container["instance_node"])
|
||||
rt.delete(node)
|
||||
|
|
@ -15,8 +15,7 @@ from openpype.hosts.max.api import lib
|
|||
class AbcLoader(load.LoaderPlugin):
|
||||
"""Alembic loader."""
|
||||
|
||||
families = ["model",
|
||||
"camera",
|
||||
families = ["camera",
|
||||
"animation",
|
||||
"pointcache"]
|
||||
label = "Load Alembic"
|
||||
|
|
|
|||
|
|
@ -21,7 +21,8 @@ class ExtractMaxSceneRaw(publish.Extractor,
|
|||
label = "Extract Max Scene (Raw)"
|
||||
hosts = ["max"]
|
||||
families = ["camera",
|
||||
"maxScene"]
|
||||
"maxScene",
|
||||
"model"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
|
|
|
|||
74
openpype/hosts/max/plugins/publish/extract_model.py
Normal file
74
openpype/hosts/max/plugins/publish/extract_model.py
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
from openpype.pipeline import (
|
||||
publish,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from pymxs import runtime as rt
|
||||
from openpype.hosts.max.api import (
|
||||
maintained_selection,
|
||||
get_all_children
|
||||
)
|
||||
|
||||
|
||||
class ExtractModel(publish.Extractor,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""
|
||||
Extract Geometry in Alembic Format
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.1
|
||||
label = "Extract Geometry (Alembic)"
|
||||
hosts = ["max"]
|
||||
families = ["model"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
container = instance.data["instance_node"]
|
||||
|
||||
self.log.info("Extracting Geometry ...")
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
filename = "{name}.abc".format(**instance.data)
|
||||
filepath = os.path.join(stagingdir, filename)
|
||||
|
||||
# We run the render
|
||||
self.log.info("Writing alembic '%s' to '%s'" % (filename,
|
||||
stagingdir))
|
||||
|
||||
export_cmd = (
|
||||
f"""
|
||||
AlembicExport.ArchiveType = #ogawa
|
||||
AlembicExport.CoordinateSystem = #maya
|
||||
AlembicExport.CustomAttributes = true
|
||||
AlembicExport.UVs = true
|
||||
AlembicExport.VertexColors = true
|
||||
AlembicExport.PreserveInstances = true
|
||||
|
||||
exportFile @"{filepath}" #noPrompt selectedOnly:on using:AlembicExport
|
||||
|
||||
""")
|
||||
|
||||
self.log.debug(f"Executing command: {export_cmd}")
|
||||
|
||||
with maintained_selection():
|
||||
# select and export
|
||||
rt.select(get_all_children(rt.getNodeByName(container)))
|
||||
rt.execute(export_cmd)
|
||||
|
||||
self.log.info("Performing Extraction ...")
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'abc',
|
||||
'ext': 'abc',
|
||||
'files': filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
|
||||
filepath))
|
||||
74
openpype/hosts/max/plugins/publish/extract_model_fbx.py
Normal file
74
openpype/hosts/max/plugins/publish/extract_model_fbx.py
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
from openpype.pipeline import (
|
||||
publish,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from pymxs import runtime as rt
|
||||
from openpype.hosts.max.api import (
|
||||
maintained_selection,
|
||||
get_all_children
|
||||
)
|
||||
|
||||
|
||||
class ExtractModelFbx(publish.Extractor,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""
|
||||
Extract Geometry in FBX Format
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.05
|
||||
label = "Extract FBX"
|
||||
hosts = ["max"]
|
||||
families = ["model"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
container = instance.data["instance_node"]
|
||||
|
||||
self.log.info("Extracting Geometry ...")
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
filename = "{name}.fbx".format(**instance.data)
|
||||
filepath = os.path.join(stagingdir,
|
||||
filename)
|
||||
self.log.info("Writing FBX '%s' to '%s'" % (filepath,
|
||||
stagingdir))
|
||||
|
||||
export_fbx_cmd = (
|
||||
f"""
|
||||
FBXExporterSetParam "Animation" false
|
||||
FBXExporterSetParam "Cameras" false
|
||||
FBXExporterSetParam "Lights" false
|
||||
FBXExporterSetParam "PointCache" false
|
||||
FBXExporterSetParam "AxisConversionMethod" "Animation"
|
||||
FbxExporterSetParam "UpAxis" "Y"
|
||||
FbxExporterSetParam "Preserveinstances" true
|
||||
|
||||
exportFile @"{filepath}" #noPrompt selectedOnly:true using:FBXEXP
|
||||
|
||||
""")
|
||||
|
||||
self.log.debug(f"Executing command: {export_fbx_cmd}")
|
||||
|
||||
with maintained_selection():
|
||||
# select and export
|
||||
rt.select(get_all_children(rt.getNodeByName(container)))
|
||||
rt.execute(export_fbx_cmd)
|
||||
|
||||
self.log.info("Performing Extraction ...")
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'fbx',
|
||||
'ext': 'fbx',
|
||||
'files': filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
|
||||
filepath))
|
||||
59
openpype/hosts/max/plugins/publish/extract_model_obj.py
Normal file
59
openpype/hosts/max/plugins/publish/extract_model_obj.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
from openpype.pipeline import (
|
||||
publish,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from pymxs import runtime as rt
|
||||
from openpype.hosts.max.api import (
|
||||
maintained_selection,
|
||||
get_all_children
|
||||
)
|
||||
|
||||
|
||||
class ExtractModelObj(publish.Extractor,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""
|
||||
Extract Geometry in OBJ Format
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.05
|
||||
label = "Extract OBJ"
|
||||
hosts = ["max"]
|
||||
families = ["model"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
container = instance.data["instance_node"]
|
||||
|
||||
self.log.info("Extracting Geometry ...")
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
filename = "{name}.obj".format(**instance.data)
|
||||
filepath = os.path.join(stagingdir,
|
||||
filename)
|
||||
self.log.info("Writing OBJ '%s' to '%s'" % (filepath,
|
||||
stagingdir))
|
||||
|
||||
with maintained_selection():
|
||||
# select and export
|
||||
rt.select(get_all_children(rt.getNodeByName(container)))
|
||||
rt.execute(f'exportFile @"{filepath}" #noPrompt selectedOnly:true using:ObjExp') # noqa
|
||||
|
||||
self.log.info("Performing Extraction ...")
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'obj',
|
||||
'ext': 'obj',
|
||||
'files': filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
|
||||
instance.data["representations"].append(representation)
|
||||
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
|
||||
filepath))
|
||||
114
openpype/hosts/max/plugins/publish/extract_model_usd.py
Normal file
114
openpype/hosts/max/plugins/publish/extract_model_usd.py
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
from openpype.pipeline import (
|
||||
publish,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from pymxs import runtime as rt
|
||||
from openpype.hosts.max.api import (
|
||||
maintained_selection
|
||||
)
|
||||
|
||||
|
||||
class ExtractModelUSD(publish.Extractor,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""
|
||||
Extract Geometry in USDA Format
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.05
|
||||
label = "Extract Geometry (USD)"
|
||||
hosts = ["max"]
|
||||
families = ["model"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
container = instance.data["instance_node"]
|
||||
|
||||
self.log.info("Extracting Geometry ...")
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
asset_filename = "{name}.usda".format(**instance.data)
|
||||
asset_filepath = os.path.join(stagingdir,
|
||||
asset_filename)
|
||||
self.log.info("Writing USD '%s' to '%s'" % (asset_filepath,
|
||||
stagingdir))
|
||||
|
||||
log_filename = "{name}.txt".format(**instance.data)
|
||||
log_filepath = os.path.join(stagingdir,
|
||||
log_filename)
|
||||
self.log.info("Writing log '%s' to '%s'" % (log_filepath,
|
||||
stagingdir))
|
||||
|
||||
# get the nodes which need to be exported
|
||||
export_options = self.get_export_options(log_filepath)
|
||||
with maintained_selection():
|
||||
# select and export
|
||||
node_list = self.get_node_list(container)
|
||||
rt.USDExporter.ExportFile(asset_filepath,
|
||||
exportOptions=export_options,
|
||||
contentSource=rt.name("selected"),
|
||||
nodeList=node_list)
|
||||
|
||||
self.log.info("Performing Extraction ...")
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'usda',
|
||||
'ext': 'usda',
|
||||
'files': asset_filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
log_representation = {
|
||||
'name': 'txt',
|
||||
'ext': 'txt',
|
||||
'files': log_filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(log_representation)
|
||||
|
||||
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
|
||||
asset_filepath))
|
||||
|
||||
def get_node_list(self, container):
|
||||
"""
|
||||
Get the target nodes which are
|
||||
the children of the container
|
||||
"""
|
||||
node_list = []
|
||||
|
||||
container_node = rt.getNodeByName(container)
|
||||
target_node = container_node.Children
|
||||
rt.select(target_node)
|
||||
for sel in rt.selection:
|
||||
node_list.append(sel)
|
||||
|
||||
return node_list
|
||||
|
||||
def get_export_options(self, log_path):
|
||||
"""Set Export Options for USD Exporter"""
|
||||
|
||||
export_options = rt.USDExporter.createOptions()
|
||||
|
||||
export_options.Meshes = True
|
||||
export_options.Shapes = False
|
||||
export_options.Lights = False
|
||||
export_options.Cameras = False
|
||||
export_options.Materials = False
|
||||
export_options.MeshFormat = rt.name('fromScene')
|
||||
export_options.FileFormat = rt.name('ascii')
|
||||
export_options.UpAxis = rt.name('y')
|
||||
export_options.LogLevel = rt.name('info')
|
||||
export_options.LogPath = log_path
|
||||
export_options.PreserveEdgeOrientation = True
|
||||
export_options.TimeMode = rt.name('current')
|
||||
|
||||
rt.USDexporter.UIOptions = export_options
|
||||
|
||||
return export_options
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import pyblish.api
|
||||
from openpype.pipeline import PublishValidationError
|
||||
from pymxs import runtime as rt
|
||||
|
||||
|
||||
class ValidateModelContent(pyblish.api.InstancePlugin):
|
||||
"""Validates Model instance contents.
|
||||
|
||||
A model instance may only hold either geometry-related
|
||||
object(excluding Shapes) or editable meshes.
|
||||
"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
families = ["model"]
|
||||
hosts = ["max"]
|
||||
label = "Model Contents"
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise PublishValidationError("Model instance must only include"
|
||||
"Geometry and Editable Mesh")
|
||||
|
||||
def get_invalid(self, instance):
|
||||
"""
|
||||
Get invalid nodes if the instance is not camera
|
||||
"""
|
||||
invalid = list()
|
||||
container = instance.data["instance_node"]
|
||||
self.log.info("Validating look content for "
|
||||
"{}".format(container))
|
||||
|
||||
con = rt.getNodeByName(container)
|
||||
selection_list = list(con.Children) or rt.getCurrentSelection()
|
||||
for sel in selection_list:
|
||||
if rt.classOf(sel) in rt.Camera.classes:
|
||||
invalid.append(sel)
|
||||
if rt.classOf(sel) in rt.Light.classes:
|
||||
invalid.append(sel)
|
||||
if rt.classOf(sel) in rt.Shape.classes:
|
||||
invalid.append(sel)
|
||||
|
||||
return invalid
|
||||
36
openpype/hosts/max/plugins/publish/validate_usd_plugin.py
Normal file
36
openpype/hosts/max/plugins/publish/validate_usd_plugin.py
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import pyblish.api
|
||||
from openpype.pipeline import PublishValidationError
|
||||
from pymxs import runtime as rt
|
||||
|
||||
|
||||
class ValidateUSDPlugin(pyblish.api.InstancePlugin):
|
||||
"""Validates if USD plugin is installed or loaded in Max
|
||||
"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder - 0.01
|
||||
families = ["model"]
|
||||
hosts = ["max"]
|
||||
label = "USD Plugin"
|
||||
|
||||
def process(self, instance):
|
||||
plugin_mgr = rt.pluginManager
|
||||
plugin_count = plugin_mgr.pluginDllCount
|
||||
plugin_info = self.get_plugins(plugin_mgr,
|
||||
plugin_count)
|
||||
usd_import = "usdimport.dli"
|
||||
if usd_import not in plugin_info:
|
||||
raise PublishValidationError("USD Plugin {}"
|
||||
" not found".format(usd_import))
|
||||
usd_export = "usdexport.dle"
|
||||
if usd_export not in plugin_info:
|
||||
raise PublishValidationError("USD Plugin {}"
|
||||
" not found".format(usd_export))
|
||||
|
||||
def get_plugins(self, manager, count):
|
||||
plugin_info_list = list()
|
||||
for p in range(1, count + 1):
|
||||
plugin_info = manager.pluginDllName(p)
|
||||
plugin_info_list.append(plugin_info)
|
||||
|
||||
return plugin_info_list
|
||||
|
|
@ -162,9 +162,15 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
|||
with parent_nodes(roots, parent=None):
|
||||
cmds.xform(group_name, zeroTransformPivots=True)
|
||||
|
||||
cmds.setAttr("{}.displayHandle".format(group_name), 1)
|
||||
|
||||
settings = get_project_settings(os.environ['AVALON_PROJECT'])
|
||||
|
||||
display_handle = settings['maya']['load'].get(
|
||||
'reference_loader', {}
|
||||
).get('display_handle', True)
|
||||
cmds.setAttr(
|
||||
"{}.displayHandle".format(group_name), display_handle
|
||||
)
|
||||
|
||||
colors = settings['maya']['load']['colors']
|
||||
c = colors.get(family)
|
||||
if c is not None:
|
||||
|
|
@ -174,7 +180,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
|||
(float(c[1]) / 255),
|
||||
(float(c[2]) / 255))
|
||||
|
||||
cmds.setAttr("{}.displayHandle".format(group_name), 1)
|
||||
cmds.setAttr(
|
||||
"{}.displayHandle".format(group_name), display_handle
|
||||
)
|
||||
# get bounding box
|
||||
bbox = cmds.exactWorldBoundingBox(group_name)
|
||||
# get pivot position on world space
|
||||
|
|
|
|||
|
|
@ -217,7 +217,11 @@ class ExtractPlayblast(publish.Extractor):
|
|||
instance.data["panel"], edit=True, **viewport_defaults
|
||||
)
|
||||
|
||||
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom)
|
||||
try:
|
||||
cmds.setAttr(
|
||||
"{}.panZoomEnabled".format(preset["camera"]), pan_zoom)
|
||||
except RuntimeError:
|
||||
self.log.warning("Cannot restore Pan/Zoom settings.")
|
||||
|
||||
collected_files = os.listdir(stagingdir)
|
||||
patterns = [clique.PATTERNS["frames"]]
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ import pyblish.api
|
|||
|
||||
from openpype.hosts.maya.api.lib import set_attribute
|
||||
from openpype.pipeline.publish import (
|
||||
RepairContextAction,
|
||||
RepairAction,
|
||||
ValidateContentsOrder,
|
||||
)
|
||||
|
||||
|
|
@ -26,7 +26,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin):
|
|||
order = ValidateContentsOrder
|
||||
label = "Attributes"
|
||||
hosts = ["maya"]
|
||||
actions = [RepairContextAction]
|
||||
actions = [RepairAction]
|
||||
optional = True
|
||||
|
||||
attributes = None
|
||||
|
|
@ -81,7 +81,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin):
|
|||
if node_name not in attributes:
|
||||
continue
|
||||
|
||||
for attr_name, expected in attributes.items():
|
||||
for attr_name, expected in attributes[node_name].items():
|
||||
|
||||
# Skip if attribute does not exist
|
||||
if not cmds.attributeQuery(attr_name, node=node, exists=True):
|
||||
|
|
|
|||
10
openpype/hosts/substancepainter/__init__.py
Normal file
10
openpype/hosts/substancepainter/__init__.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
from .addon import (
|
||||
SubstanceAddon,
|
||||
SUBSTANCE_HOST_DIR,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"SubstanceAddon",
|
||||
"SUBSTANCE_HOST_DIR"
|
||||
)
|
||||
34
openpype/hosts/substancepainter/addon.py
Normal file
34
openpype/hosts/substancepainter/addon.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
SUBSTANCE_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class SubstanceAddon(OpenPypeModule, IHostAddon):
    """Addon integrating Adobe Substance 3D Painter as an OpenPype host."""

    name = "substancepainter"
    host_name = "substancepainter"

    def initialize(self, module_settings):
        # Always enabled; module settings are currently unused.
        self.enabled = True

    def add_implementation_envs(self, env, _app):
        """Prepend the bundled plugin folder to Substance's plugin path."""
        plugin_path = os.path.join(SUBSTANCE_HOST_DIR, "deploy")
        plugin_path = plugin_path.replace("\\", "/")

        existing_path = env.get("SUBSTANCE_PAINTER_PLUGINS_PATH")
        if existing_path:
            plugin_path = plugin_path + os.pathsep + existing_path
        env["SUBSTANCE_PAINTER_PLUGINS_PATH"] = plugin_path

        # Log in Substance Painter doesn't support custom terminal colors
        env["OPENPYPE_LOG_NO_COLORS"] = "Yes"

    def get_launch_hook_paths(self, app):
        """Return launch hook folders, only for this addon's own host."""
        if app.host_name != self.host_name:
            return []
        return [os.path.join(SUBSTANCE_HOST_DIR, "hooks")]

    def get_workfile_extensions(self):
        """Workfile extensions recognized for Substance Painter projects."""
        return [".spp", ".toc"]
|
||||
8
openpype/hosts/substancepainter/api/__init__.py
Normal file
8
openpype/hosts/substancepainter/api/__init__.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
from .pipeline import (
|
||||
SubstanceHost,
|
||||
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"SubstanceHost",
|
||||
]
|
||||
157
openpype/hosts/substancepainter/api/colorspace.py
Normal file
157
openpype/hosts/substancepainter/api/colorspace.py
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
"""Substance Painter OCIO management
|
||||
|
||||
Adobe Substance 3D Painter supports OCIO color management using a per project
|
||||
configuration. Output color spaces are defined at the project level
|
||||
|
||||
More information see:
|
||||
- https://substance3d.adobe.com/documentation/spdoc/color-management-223053233.html # noqa
|
||||
- https://substance3d.adobe.com/documentation/spdoc/color-management-with-opencolorio-225969419.html # noqa
|
||||
|
||||
"""
|
||||
import substance_painter.export
|
||||
import substance_painter.js
|
||||
import json
|
||||
|
||||
from .lib import (
|
||||
get_document_structure,
|
||||
get_channel_format
|
||||
)
|
||||
|
||||
|
||||
def _iter_document_stack_channels():
    """Yield (stack path, channel) for every channel in the open project.

    The stack path is ``[material, stack]`` for named stacks, otherwise
    just the material name as a string.
    """
    document = get_document_structure()
    for material in document["materials"]:
        for stack in material["stacks"]:
            if stack["name"]:
                stack_path = [material["name"], stack["name"]]
            else:
                stack_path = material["name"]
            for channel in stack["channels"]:
                yield stack_path, channel
|
||||
|
||||
|
||||
def _get_first_color_and_data_stack_and_channel():
    """Return first found color channel and first found data channel.

    Each entry is a ``(stack_path, channel)`` tuple or ``None`` when no
    channel of that kind exists anywhere in the project. Iteration stops
    as soon as one of each kind has been found.
    """
    found_color = None
    found_data = None
    for stack_path, channel in _iter_document_stack_channels():
        if get_channel_format(stack_path, channel)["color"]:
            found_color = (stack_path, channel)
        else:
            found_data = (stack_path, channel)

        # Stop early once both kinds are collected.
        if found_color is not None and found_data is not None:
            break

    return found_color, found_data
|
||||
|
||||
|
||||
def get_project_channel_data():
    """Return colorSpace settings for the current substance painter project.

    In Substance Painter only color channels have Color Management enabled
    whereas data channels have no color management applied. This can't be
    changed. The artist can only customize the export color space for color
    channels per bit-depth for 8 bpc, 16 bpc and 32 bpc.

    As such this returns the color space for 'data' and, for color channels,
    one entry per bit depth (the depth suffix is appended to the "color" key).

    Example output:
        {
            "data": {'colorSpace': 'Utility - Raw'},
            "color8": {"colorSpace": "ACES - AcesCG"},
            "color16": {"colorSpace": "ACES - AcesCG"},
            "color16f": {"colorSpace": "ACES - AcesCG"},
            "color32f": {"colorSpace": "ACES - AcesCG"}
        }

    A value is ``None`` when the project has no channel of that kind at
    all, in which case the output color space can't be identified.

    """
    # The trick: export a dummy texture whose *filename* is a JSON query of
    # export tokens ($colorSpace); the resolved filename then tells us what
    # the project would substitute for each token.
    keys = ["colorSpace"]
    query = {key: f"${key}" for key in keys}

    config = {
        "exportPath": "/",
        "exportShaderParams": False,
        "defaultExportPreset": "query_preset",

        "exportPresets": [{
            "name": "query_preset",

            # List of maps making up this export preset.
            "maps": [{
                "fileName": json.dumps(query),
                # List of source/destination defining which channels will
                # make up the texture file.
                "channels": [],
                "parameters": {
                    "fileFormat": "exr",
                    "bitDepth": "32f",
                    "dithering": False,
                    "sizeLog2": 4,
                    "paddingAlgorithm": "passthrough",
                    "dilationDistance": 16
                }
            }]
        }],
    }

    def _get_query_output(config):
        # Return the basename of the single output path we defined,
        # parsed from JSON. Note: `list_project_textures` only lists the
        # expected outputs, it does not write any files.
        result = substance_painter.export.list_project_textures(config)
        path = next(iter(result.values()))[0]
        # strip extension and slash since we know relevant json data starts
        # and ends with { and } characters
        path = path.strip("/\\.exr")
        return json.loads(path)

    # Query for each type of channel (color and data)
    color_channel, data_channel = _get_first_color_and_data_stack_and_channel()
    colorspaces = {}
    for key, channel_data in {
        "data": data_channel,
        "color": color_channel
    }.items():
        if channel_data is None:
            # No channel of that datatype anywhere in the Stack. We're
            # unable to identify the output color space of the project
            colorspaces[key] = None
            continue

        stack, channel = channel_data

        # Stack must be a string
        if not isinstance(stack, str):
            # Assume iterable
            stack = "/".join(stack)

        # Define the temp output config
        config["exportList"] = [{"rootPath": stack}]
        config_map = config["exportPresets"][0]["maps"][0]
        config_map["channels"] = [
            {
                "destChannel": x,
                "srcChannel": x,
                "srcMapType": "documentMap",
                "srcMapName": channel
            } for x in "RGB"
        ]

        if key == "color":
            # Query for each bit depth
            # Color space definition can have a different OCIO config set
            # for 8-bit, 16-bit and 32-bit outputs so we need to check each
            # bit depth
            for depth in ["8", "16", "16f", "32f"]:
                config_map["parameters"]["bitDepth"] = depth  # noqa
                colorspaces[key + depth] = _get_query_output(config)
        else:
            # Data channel (not color managed)
            colorspaces[key] = _get_query_output(config)

    return colorspaces
|
||||
649
openpype/hosts/substancepainter/api/lib.py
Normal file
649
openpype/hosts/substancepainter/api/lib.py
Normal file
|
|
@ -0,0 +1,649 @@
|
|||
import os
import re
import json
from collections import defaultdict

import substance_painter.project
import substance_painter.resource
import substance_painter.js
import substance_painter.export
import substance_painter.ui

from qtpy import QtGui, QtWidgets, QtCore
|
||||
|
||||
|
||||
def get_export_presets():
    """Return Export Preset resource URLs for all available Export Presets.

    Returns:
        dict: {Resource url: GUI Label}

    """
    # TODO: Find more optimal way to find all export templates
    shelf_presets = {}
    for shelf in substance_painter.resource.Shelves.all():
        presets_dir = os.path.join(os.path.normpath(shelf.path()),
                                   "export-presets")
        if not os.path.exists(presets_dir):
            continue

        for filename in os.listdir(presets_dir):
            if not filename.endswith(".spexp"):
                continue

            label = os.path.splitext(filename)[0]
            resource_id = substance_painter.resource.ResourceID(
                context=shelf.name(),
                name=label
            )
            shelf_presets[resource_id.url()] = label

    # Built-in generator presets go first, followed by the shelf presets
    # sorted by their GUI label.
    # TODO: find the built-ins automatically; scraped with https://gist.github.com/BigRoy/97150c7c6f0a0c916418207b9a2bc8f1  # noqa
    presets = {
        "export-preset-generator://viewport2d": "2D View",  # noqa
        "export-preset-generator://doc-channel-normal-no-alpha": "Document channels + Normal + AO (No Alpha)",  # noqa
        "export-preset-generator://doc-channel-normal-with-alpha": "Document channels + Normal + AO (With Alpha)",  # noqa
        "export-preset-generator://sketchfab": "Sketchfab",  # noqa
        "export-preset-generator://adobe-standard-material": "Substance 3D Stager",  # noqa
        "export-preset-generator://usd": "USD PBR Metal Roughness",  # noqa
        "export-preset-generator://gltf": "glTF PBR Metal Roughness",  # noqa
        "export-preset-generator://gltf-displacement": "glTF PBR Metal Roughness + Displacement texture (experimental)"  # noqa
    }
    presets.update(
        dict(sorted(shelf_presets.items(), key=lambda item: item[1]))
    )
    return presets
|
||||
|
||||
|
||||
def _convert_stack_path_to_cmd_str(stack_path):
|
||||
"""Convert stack path `str` or `[str, str]` for javascript query
|
||||
|
||||
Example usage:
|
||||
>>> stack_path = _convert_stack_path_to_cmd_str(stack_path)
|
||||
>>> cmd = f"alg.mapexport.channelIdentifiers({stack_path})"
|
||||
>>> substance_painter.js.evaluate(cmd)
|
||||
|
||||
Args:
|
||||
stack_path (list or str): Path to the stack, could be
|
||||
"Texture set name" or ["Texture set name", "Stack name"]
|
||||
|
||||
Returns:
|
||||
str: Stack path usable as argument in javascript query.
|
||||
|
||||
"""
|
||||
return json.dumps(stack_path)
|
||||
|
||||
|
||||
def get_channel_identifiers(stack_path=None):
    """Return the list of channel identifiers.

    If a context is passed (texture set/stack), return only used channels
    with resolved user channels.

    Channel identifiers are:
        basecolor, height, specular, opacity, emissive, displacement,
        glossiness, roughness, anisotropylevel, anisotropyangle,
        transmissive, scattering, reflection, ior, metallic, normal,
        ambientOcclusion, diffuse, specularlevel, blendingmask,
        [custom user names].

    Args:
        stack_path (list or str, Optional): Path to the stack, could be
            "Texture set name" or ["Texture set name", "Stack name"]

    Returns:
        list: List of channel identifiers.

    """
    # An empty argument list queries the whole project.
    stack_arg = (
        "" if stack_path is None
        else _convert_stack_path_to_cmd_str(stack_path)
    )
    return substance_painter.js.evaluate(
        f"alg.mapexport.channelIdentifiers({stack_arg})"
    )
|
||||
|
||||
|
||||
def get_channel_format(stack_path, channel):
    """Retrieve the channel format of a specific stack channel.

    See `alg.mapexport.channelFormat` (javascript API) for more details.

    The channel format data is:
        "label" (str): The channel format label: could be one of
            [sRGB8, L8, RGB8, L16, RGB16, L16F, RGB16F, L32F, RGB32F]
        "color" (bool): True if the format is in color, False is grayscale
        "floating" (bool): True if the format uses floating point
            representation, false otherwise
        "bitDepth" (int): Bit per color channel (could be 8, 16 or 32 bpc)

    Arguments:
        stack_path (list or str): Path to the stack, could be
            "Texture set name" or ["Texture set name", "Stack name"]
        channel (str): Identifier of the channel to export
            (see `get_channel_identifiers`)

    Returns:
        dict: The channel format data.

    """
    stack_arg = _convert_stack_path_to_cmd_str(stack_path)
    query = f"alg.mapexport.channelFormat({stack_arg}, '{channel}')"
    return substance_painter.js.evaluate(query)
|
||||
|
||||
|
||||
def get_document_structure():
    """Dump the document structure.

    See `alg.mapexport.documentStructure` (javascript API) for more details.

    Returns:
        dict: Document structure or None when no project is open

    """
    query = "alg.mapexport.documentStructure()"
    return substance_painter.js.evaluate(query)
|
||||
|
||||
|
||||
def get_export_templates(config, format="png", strip_folder=True):
    """Return export config outputs.

    This use the Javascript API `alg.mapexport.getPathsExportDocumentMaps`
    which returns a different output than using the Python equivalent
    `substance_painter.export.list_project_textures(config)`.

    The nice thing about the Javascript API version is that it returns the
    output textures grouped by filename template.

    A downside is that it doesn't return all the UDIM tiles but per template
    always returns a single file.

    Note:
        The file format needs to be explicitly passed to the Javascript API
        but upon exporting through the Python API the file format can be based
        on the output preset. So it's likely the file extension will mismatch

    Warning:
        Even though the function appears to solely get the expected outputs
        the Javascript API will actually create the config's texture output
        folder if it does not exist yet. As such, a valid path must be set.

    Example output:
        {
            "DefaultMaterial": {
                "$textureSet_BaseColor(_$colorSpace)(.$udim)": "DefaultMaterial_BaseColor_ACES - ACEScg.1002.png",  # noqa
                "$textureSet_Emissive(_$colorSpace)(.$udim)": "DefaultMaterial_Emissive_ACES - ACEScg.1002.png",  # noqa
                "$textureSet_Height(_$colorSpace)(.$udim)": "DefaultMaterial_Height_Utility - Raw.1002.png",  # noqa
                "$textureSet_Metallic(_$colorSpace)(.$udim)": "DefaultMaterial_Metallic_Utility - Raw.1002.png",  # noqa
                "$textureSet_Normal(_$colorSpace)(.$udim)": "DefaultMaterial_Normal_Utility - Raw.1002.png",  # noqa
                "$textureSet_Roughness(_$colorSpace)(.$udim)": "DefaultMaterial_Roughness_Utility - Raw.1002.png"  # noqa
            }
        }

    Arguments:
        config (dict) Export config
        format (str, Optional): Output format to write to, defaults to 'png'
        strip_folder (bool, Optional): Whether to strip the output folder
            from the output filenames.

    Raises:
        ValueError: If a returned filepath does not start with the config's
            export folder.

    Returns:
        dict: The expected output maps.

    """
    folder = config["exportPath"].replace("\\", "/")
    preset = config["defaultExportPreset"]
    cmd = f'alg.mapexport.getPathsExportDocumentMaps("{preset}", "{folder}", "{format}")'  # noqa
    result = substance_painter.js.evaluate(cmd)

    if strip_folder:
        for _stack, maps in result.items():
            for map_template, map_filepath in maps.items():
                map_filepath = map_filepath.replace("\\", "/")
                # Explicit check instead of `assert`: asserts are stripped
                # when Python runs with -O and this is a genuine error case.
                if not map_filepath.startswith(folder):
                    raise ValueError(
                        "Filepath {} must start with folder {}".format(
                            map_filepath, folder
                        )
                    )
                map_filename = map_filepath[len(folder):].lstrip("/")
                maps[map_template] = map_filename

    return result
|
||||
|
||||
|
||||
def _templates_to_regex(templates,
|
||||
texture_set,
|
||||
colorspaces,
|
||||
project,
|
||||
mesh):
|
||||
"""Return regex based on a Substance Painter expot filename template.
|
||||
|
||||
This converts Substance Painter export filename templates like
|
||||
`$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)` into a regex
|
||||
which can be used to query an output filename to help retrieve:
|
||||
|
||||
- Which template filename the file belongs to.
|
||||
- Which color space the file is written with.
|
||||
- Which udim tile it is exactly.
|
||||
|
||||
This is used by `get_parsed_export_maps` which tries to as explicitly
|
||||
as possible match the filename pattern against the known possible outputs.
|
||||
That's why Texture Set name, Color spaces, Project path and mesh path must
|
||||
be provided. By doing so we get the best shot at correctly matching the
|
||||
right template because otherwise $texture_set could basically be any string
|
||||
and thus match even that of a color space or mesh.
|
||||
|
||||
Arguments:
|
||||
templates (list): List of templates to convert to regex.
|
||||
texture_set (str): The texture set to match against.
|
||||
colorspaces (list): The colorspaces defined in the current project.
|
||||
project (str): Filepath of current substance project.
|
||||
mesh (str): Path to mesh file used in current project.
|
||||
|
||||
Returns:
|
||||
dict: Template: Template regex pattern
|
||||
|
||||
"""
|
||||
def _filename_no_ext(path):
|
||||
return os.path.splitext(os.path.basename(path))[0]
|
||||
|
||||
if colorspaces and any(colorspaces):
|
||||
colorspace_match = "|".join(re.escape(c) for c in set(colorspaces))
|
||||
colorspace_match = f"({colorspace_match})"
|
||||
else:
|
||||
# No colorspace support enabled
|
||||
colorspace_match = ""
|
||||
|
||||
# Key to regex valid search values
|
||||
key_matches = {
|
||||
"$project": re.escape(_filename_no_ext(project)),
|
||||
"$mesh": re.escape(_filename_no_ext(mesh)),
|
||||
"$textureSet": re.escape(texture_set),
|
||||
"$colorSpace": colorspace_match,
|
||||
"$udim": "([0-9]{4})"
|
||||
}
|
||||
|
||||
# Turn the templates into regexes
|
||||
regexes = {}
|
||||
for template in templates:
|
||||
|
||||
# We need to tweak a temp
|
||||
search_regex = re.escape(template)
|
||||
|
||||
# Let's assume that any ( and ) character in the file template was
|
||||
# intended as an optional template key and do a simple `str.replace`
|
||||
# Note: we are matching against re.escape(template) so will need to
|
||||
# search for the escaped brackets.
|
||||
search_regex = search_regex.replace(re.escape("("), "(")
|
||||
search_regex = search_regex.replace(re.escape(")"), ")?")
|
||||
|
||||
# Substitute each key into a named group
|
||||
for key, key_expected_regex in key_matches.items():
|
||||
|
||||
# We want to use the template as a regex basis in the end so will
|
||||
# escape the whole thing first. Note that thus we'll need to
|
||||
# search for the escaped versions of the keys too.
|
||||
escaped_key = re.escape(key)
|
||||
key_label = key[1:] # key without $ prefix
|
||||
|
||||
key_expected_grp_regex = f"(?P<{key_label}>{key_expected_regex})"
|
||||
search_regex = search_regex.replace(escaped_key,
|
||||
key_expected_grp_regex)
|
||||
|
||||
# The filename templates don't include the extension so we add it
|
||||
# to be able to match the out filename beginning to end
|
||||
ext_regex = r"(?P<ext>\.[A-Za-z][A-Za-z0-9-]*)"
|
||||
search_regex = rf"^{search_regex}{ext_regex}$"
|
||||
|
||||
regexes[template] = search_regex
|
||||
|
||||
return regexes
|
||||
|
||||
|
||||
def strip_template(template, strip="._ "):
    """Return static characters in a substance painter filename template.

    Template tokens (``$textureSet`` etc.) and optional parenthesized
    sections are removed; what remains are the literal characters.

    >>> strip_template("$textureSet_HELLO(.$udim)")
    'HELLO'
    >>> strip_template("$mesh_$textureSet_HELLO_WORLD_$colorSpace(.$udim)")
    'HELLO_WORLD'
    >>> strip_template("$textureSet_HELLO(.$udim)", strip=None)
    '_HELLO'
    >>> strip_template("$mesh_$textureSet_$colorSpace(.$udim)", strip=None)
    '__'

    Arguments:
        template (str): Filename template to strip.
        strip (str, optional): Characters to strip from beginning and end
            of the static string in template. Defaults to: `._ `.

    Returns:
        str: The static string in filename template.

    """
    # Remove all template keys; only potentially-static text remains.
    keys = ["$project", "$mesh", "$textureSet", "$udim", "$colorSpace"]
    stripped_template = template
    for key in keys:
        stripped_template = stripped_template.replace(key, "")

    # Everything inside an optional bracket section is excluded since it's
    # not static. Track nesting depth to know whether the current character
    # lies inside an 'optional' group; unbalanced ')' is clamped to depth 0.
    depth = 0
    result = ""
    for char in stripped_template:
        if char == "(":
            depth += 1
        elif char == ")":
            depth = max(depth - 1, 0)
        elif depth == 0:
            result += char

    if strip:
        # Strip off any leading/trailing separator characters. Technically
        # these are static but start and end separators like space or
        # underscore usually aren't wanted.
        result = result.strip(strip)

    return result
|
||||
|
||||
|
||||
def get_parsed_export_maps(config):
    """Return Export Config's expected output textures with parsed data.

    This tries to parse the texture outputs using a Python API export config.

    Parses template keys: $project, $mesh, $textureSet, $colorSpace, $udim

    Example:
        {("DefaultMaterial", ""): {
            "$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)": [
                {
                    // OUTPUT DATA FOR FILE #1 OF THE TEMPLATE
                },
                {
                    // OUTPUT DATA FOR FILE #2 OF THE TEMPLATE
                },
            ]
        },
        }}

    File output data (all outputs are `str`).
    1) Parsed tokens: These are parsed tokens from the template, they will
       only exist if found in the filename template and output filename.

        project: Workfile filename without extension
        mesh: Filename of the loaded mesh without extension
        textureSet: The texture set, e.g. "DefaultMaterial",
        colorSpace: The color space, e.g. "ACES - ACEScg",
        udim: The udim tile, e.g. "1001"

    2) Template output and filepath

        filepath: Full path to the resulting texture map, e.g.
            "/path/to/mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png",
        output: "mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png"
            Note: if template had slashes (folders) then `output` will too.
            So `output` might include a folder.

    Returns:
        dict: [texture_set, stack]: {template: [file1_data, file2_data]}

    """
    # Import is here to avoid recursive lib <-> colorspace imports
    from .colorspace import get_project_channel_data

    outputs = substance_painter.export.list_project_textures(config)
    templates = get_export_templates(config, strip_folder=False)

    # Get all color spaces set for the current project
    # NOTE(review): values may be None when a channel kind doesn't exist in
    # the project (see get_project_channel_data) — `data["colorSpace"]`
    # would then raise TypeError; confirm whether that case can occur here.
    project_colorspaces = set(
        data["colorSpace"] for data in get_project_channel_data().values()
    )

    # Get current project mesh path and project path to explicitly match
    # the $mesh and $project tokens
    project_mesh_path = substance_painter.project.last_imported_mesh_path()
    project_path = substance_painter.project.file_path()

    # Get the current export path to strip this of the beginning of filepath
    # results, since filename templates don't have these we'll match without
    # that part of the filename.
    export_path = config["exportPath"]
    export_path = export_path.replace("\\", "/")
    if not export_path.endswith("/"):
        export_path += "/"

    # Parse the outputs
    result = {}
    for key, filepaths in outputs.items():
        texture_set, stack = key

        # Templates are keyed by "textureSet/stack" or just "textureSet"
        # when the stack has no name.
        if stack:
            stack_path = f"{texture_set}/{stack}"
        else:
            stack_path = texture_set

        stack_templates = list(templates[stack_path].keys())

        template_regex = _templates_to_regex(stack_templates,
                                             texture_set=texture_set,
                                             colorspaces=project_colorspaces,
                                             mesh=project_mesh_path,
                                             project=project_path)

        # Let's precompile the regexes
        for template, regex in template_regex.items():
            template_regex[template] = re.compile(regex)

        stack_results = defaultdict(list)
        for filepath in sorted(filepaths):
            # We strip explicitly using the full parent export path instead of
            # using `os.path.basename` because export template is allowed to
            # have subfolders in its template which we want to match against
            # NOTE(review): `assert` is stripped under `python -O`; an
            # explicit raise would be safer for this validation.
            filepath = filepath.replace("\\", "/")
            assert filepath.startswith(export_path), (
                f"Filepath {filepath} must start with folder {export_path}"
            )
            filename = filepath[len(export_path):]

            # First template whose regex matches the filename wins.
            for template, regex in template_regex.items():
                match = regex.match(filename)
                if match:
                    # NOTE(review): `default={}` makes every unmatched group
                    # map to the SAME shared dict object, and contradicts
                    # the docstring's "all outputs are `str`" — confirm
                    # whether `default=None` (or omitting it) was intended.
                    parsed = match.groupdict(default={})

                    # Include some special outputs for convenience
                    parsed["filepath"] = filepath
                    parsed["output"] = filename

                    stack_results[template].append(parsed)
                    break
            else:
                # NOTE(review): "(unknown)" looks like an unfilled
                # placeholder — presumably this should interpolate
                # `filename`; verify before relying on this message.
                raise ValueError(f"Unable to match (unknown) against any "
                                 f"template in: {list(template_regex.keys())}")

        result[key] = dict(stack_results)

    return result
|
||||
|
||||
|
||||
def load_shelf(path, name=None):
    """Add shelf to substance painter (for current application session)

    This will dynamically add a Shelf for the current session. It's good
    to note however that these will *not* persist on restart of the host.

    Note:
        Consider the loaded shelf a static library of resources.

        The shelf will *not* be visible in application preferences in
        Edit > Settings > Libraries.

        The shelf will *not* show in the Assets browser if it has no existing
        assets

        The shelf will *not* be a selectable option for selecting it as a
        destination to import resources too.

    """
    # Normalize to an absolute, forward-slashed and expanded path
    path = os.path.expandvars(path)
    path = os.path.abspath(path).replace("\\", "/")

    if not os.path.isdir(path):
        raise ValueError(f"Path is not an existing folder: {path}")

    if name is None:
        name = os.path.basename(path)

    # Shelf names must be unique and may only contain lowercase letters,
    # numbers, underscores or hyphens; sanitize anything else.
    name = re.sub(r"[^a-z0-9_\-]", "_", name.lower())

    if substance_painter.resource.Shelves.exists(name):
        existing_shelf = next(
            shelf for shelf in substance_painter.resource.Shelves.all()
            if shelf.name() == name
        )
        if os.path.normpath(existing_shelf.path()) != os.path.normpath(path):
            raise ValueError(f"Shelf with name '{name}' already exists "
                             f"for a different path: '{existing_shelf.path()}")
        # Same shelf already registered at the same path; nothing to do.
        return

    print(f"Adding Shelf '{name}' to path: {path}")
    substance_painter.resource.Shelves.add(name, path)

    return name
|
||||
|
||||
|
||||
def _get_new_project_action():
    """Return QAction which triggers Substance Painter's new project dialog.

    Returns:
        QAction or None: The File menu's "New file" action, or None when
            the file menu or the action could not be found.
    """
    main_window = substance_painter.ui.get_main_window()

    # Find the file menu's New file action
    menubar = main_window.menuBar()
    new_action = None
    for action in menubar.actions():
        menu = action.menu()
        if not menu:
            continue

        if menu.objectName() != "file":
            continue

        # Find the action with the CTRL+N key sequence. Supply a default of
        # None so a missing action is reported by the caller's `if not
        # new_action` guard instead of raising StopIteration here.
        new_action = next(
            (action for action in menu.actions()
             if action.shortcut() == QtGui.QKeySequence.New),
            None
        )
        break

    return new_action
|
||||
|
||||
|
||||
def prompt_new_file_with_mesh(mesh_filepath):
    """Prompts the user for a new file using Substance Painter's own dialog.

    This will set the mesh path to load to the given mesh and disables the
    dialog box to disallow the user to change the path. This way we can allow
    user configuration of a project but set the mesh path ourselves.

    Warning:
        This is very hacky and experimental.

    Note:
        If a project is currently open using the same mesh filepath it can't
        accurately detect whether the user had actually accepted the new project
        dialog or whether the project afterwards is still the original project,
        for example when the user might have cancelled the operation.

    Args:
        mesh_filepath (str): Path to an existing mesh file to preset in the
            New Project dialog.

    Returns:
        str or None: The mesh path of the newly created project, or None
            when no project ended up open or its mesh differs from
            *mesh_filepath* (e.g. the user cancelled).
    """

    app = QtWidgets.QApplication.instance()
    # NOTE(review): `assert` is stripped under `python -O`; an explicit
    # raise would be safer for input validation.
    assert os.path.isfile(mesh_filepath), \
        f"Mesh filepath does not exist: {mesh_filepath}"

    def _setup_file_dialog():
        """Set filepath in QFileDialog and trigger accept result"""
        # Expects Substance's mesh-select QFileDialog to be the active
        # modal widget when this fires (scheduled via QTimer below).
        file_dialog = app.activeModalWidget()
        assert isinstance(file_dialog, QtWidgets.QFileDialog)

        # Quickly hide the dialog
        file_dialog.hide()
        app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 1000)

        file_dialog.setDirectory(os.path.dirname(mesh_filepath))
        url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath))
        file_dialog.selectUrl(url)

        # Give the explorer window time to refresh to the folder and select
        # the file
        while not file_dialog.selectedFiles():
            app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 1000)
        print(f"Selected: {file_dialog.selectedFiles()}")

        # Set it again now we know the path is refreshed - without this
        # accepting the dialog will often not trigger the correct filepath
        file_dialog.setDirectory(os.path.dirname(mesh_filepath))
        url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath))
        file_dialog.selectUrl(url)

        file_dialog.done(file_dialog.Accepted)
        app.processEvents(QtCore.QEventLoop.AllEvents)

    def _setup_prompt():
        # Expects the "NewProjectDialog" to be the active modal widget
        # when this fires (scheduled via QTimer below).
        app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents)
        dialog = app.activeModalWidget()
        assert dialog.objectName() == "NewProjectDialog"

        # Set the window title
        mesh = os.path.basename(mesh_filepath)
        dialog.setWindowTitle(f"New Project with mesh: {mesh}")

        # Get the select mesh file button
        mesh_select = dialog.findChild(QtWidgets.QPushButton, "meshSelect")

        # Hide the select mesh button to the user to block changing of mesh
        mesh_select.setVisible(False)

        # Ensure UI is visually up-to-date
        app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents)

        # Trigger the 'select file' dialog to set the path and have the
        # new file dialog to use the path.
        QtCore.QTimer.singleShot(10, _setup_file_dialog)
        mesh_select.click()

        app.processEvents(QtCore.QEventLoop.AllEvents, 5000)

        # Verify the dialog's mesh label now shows a file; otherwise the
        # file-dialog trick above failed to apply the path.
        mesh_filename = dialog.findChild(QtWidgets.QFrame, "meshFileName")
        mesh_filename_label = mesh_filename.findChild(QtWidgets.QLabel)
        if not mesh_filename_label.text():
            dialog.close()
            raise RuntimeError(f"Failed to set mesh path: {mesh_filepath}")

    new_action = _get_new_project_action()
    if not new_action:
        raise RuntimeError("Unable to detect new file action..")

    # Schedule the prompt setup to run once the New Project dialog opens,
    # then trigger Substance's own "New file" action.
    QtCore.QTimer.singleShot(0, _setup_prompt)
    new_action.trigger()
    app.processEvents(QtCore.QEventLoop.AllEvents, 5000)

    if not substance_painter.project.is_open():
        return

    # Confirm mesh was set as expected
    project_mesh = substance_painter.project.last_imported_mesh_path()
    if os.path.normpath(project_mesh) != os.path.normpath(mesh_filepath):
        return

    return project_mesh
|
||||
427
openpype/hosts/substancepainter/api/pipeline.py
Normal file
427
openpype/hosts/substancepainter/api/pipeline.py
Normal file
|
|
@ -0,0 +1,427 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pipeline tools for OpenPype Substance Painter integration."""
|
||||
import os
|
||||
import logging
|
||||
from functools import partial
|
||||
|
||||
# Substance 3D Painter modules
|
||||
import substance_painter.ui
|
||||
import substance_painter.event
|
||||
import substance_painter.project
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
|
||||
from openpype.settings import (
|
||||
get_current_project_settings,
|
||||
get_system_settings
|
||||
)
|
||||
|
||||
from openpype.pipeline.template_data import get_template_data_with_names
|
||||
from openpype.pipeline import (
|
||||
register_creator_plugin_path,
|
||||
register_loader_plugin_path,
|
||||
AVALON_CONTAINER_ID,
|
||||
Anatomy
|
||||
)
|
||||
from openpype.lib import (
|
||||
StringTemplate,
|
||||
register_event_callback,
|
||||
emit_event,
|
||||
)
|
||||
from openpype.pipeline.load import any_outdated_containers
|
||||
from openpype.hosts.substancepainter import SUBSTANCE_HOST_DIR
|
||||
|
||||
from . import lib
|
||||
|
||||
log = logging.getLogger("openpype.hosts.substance")

# Plug-in locations registered with pyblish / load / create on install().
PLUGINS_DIR = os.path.join(SUBSTANCE_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

# Root key for all OpenPype data stored in the Substance Painter project
# metadata; the child keys below live inside that metadata blob.
OPENPYPE_METADATA_KEY = "OpenPype"
OPENPYPE_METADATA_CONTAINERS_KEY = "containers"  # child key
OPENPYPE_METADATA_CONTEXT_KEY = "context"  # child key
OPENPYPE_METADATA_INSTANCES_KEY = "instances"  # child key
||||
|
||||
|
||||
class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
    """OpenPype host integration for Adobe Substance 3D Painter.

    Implements workfile management, loaded-container access and publish
    context storage on top of the ``substance_painter`` Python API, and
    installs the OpenPype menu, callbacks and custom shelves.
    """

    name = "substancepainter"

    def __init__(self):
        super(SubstanceHost, self).__init__()
        self._has_been_setup = False
        self.menu = None
        # (event, callback) pairs registered on the Substance dispatcher
        self.callbacks = []
        # Names of shelves installed from project settings
        self.shelves = []

    def install(self):
        """Register plug-in paths, event callbacks, menu and shelves."""
        pyblish.api.register_host("substancepainter")

        pyblish.api.register_plugin_path(PUBLISH_PATH)
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)

        log.info("Installing callbacks ... ")
        # register_event_callback("init", on_init)
        self._register_callbacks()
        # register_event_callback("before.save", before_save)
        # register_event_callback("save", on_save)
        register_event_callback("open", on_open)
        # register_event_callback("new", on_new)

        log.info("Installing menu ... ")
        self._install_menu()

        project_settings = get_current_project_settings()
        self._install_shelves(project_settings)

        self._has_been_setup = True

    def uninstall(self):
        """Remove shelves, menu and callbacks installed by `install`."""
        self._uninstall_shelves()
        self._uninstall_menu()
        self._deregister_callbacks()

    def has_unsaved_changes(self):
        """Return whether the currently open project has unsaved changes."""
        if not substance_painter.project.is_open():
            return False

        return substance_painter.project.needs_saving()

    def get_workfile_extensions(self):
        """Return supported workfile extensions."""
        return [".spp", ".toc"]

    def save_workfile(self, dst_path=None):
        """Save the current project.

        Args:
            dst_path (str, optional): Destination path. Defaults to the
                current workfile path.

        Returns:
            str or bool: Saved filepath, or False when no project is open.
        """
        if not substance_painter.project.is_open():
            return False

        if not dst_path:
            dst_path = self.get_current_workfile()

        full_save_mode = substance_painter.project.ProjectSaveMode.Full
        substance_painter.project.save_as(dst_path, full_save_mode)

        return dst_path

    def open_workfile(self, filepath):
        """Open `filepath` as the current project.

        Raises:
            RuntimeError: When the file does not exist.
        """
        if not os.path.exists(filepath):
            raise RuntimeError("File does not exist: {}".format(filepath))

        # We must first explicitly close current project before opening another
        if substance_painter.project.is_open():
            substance_painter.project.close()

        substance_painter.project.open(filepath)
        return filepath

    def get_current_workfile(self):
        """Return the current project filepath or None when unsaved/closed."""
        if not substance_painter.project.is_open():
            return None

        filepath = substance_painter.project.file_path()
        if filepath and filepath.endswith(".spt"):
            # When currently in a Substance Painter template assume our
            # scene isn't saved. This can be the case directly after doing
            # "New project", the path will then be the template used. This
            # avoids Workfiles tool trying to save as .spt extension if the
            # file hasn't been saved before.
            return

        return filepath

    def get_containers(self):
        """Yield container data stored in the project's OpenPype metadata."""
        if not substance_painter.project.is_open():
            return

        metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
        containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY)
        if containers:
            for key, container in containers.items():
                # The storage key doubles as the container's object name
                container["objectName"] = key
                yield container

    def update_context_data(self, data, changes):
        """Store publish context data in the project metadata."""
        if not substance_painter.project.is_open():
            return

        metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
        metadata.set(OPENPYPE_METADATA_CONTEXT_KEY, data)

    def get_context_data(self):
        """Return publish context data from the project metadata."""
        if not substance_painter.project.is_open():
            return

        metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
        return metadata.get(OPENPYPE_METADATA_CONTEXT_KEY) or {}

    def _install_menu(self):
        """Create the OpenPype menu with the standard host tools."""
        from PySide2 import QtWidgets
        from openpype.tools.utils import host_tools

        parent = substance_painter.ui.get_main_window()

        menu = QtWidgets.QMenu("OpenPype")

        action = menu.addAction("Create...")
        action.triggered.connect(
            lambda: host_tools.show_publisher(parent=parent,
                                              tab="create")
        )

        action = menu.addAction("Load...")
        action.triggered.connect(
            lambda: host_tools.show_loader(parent=parent, use_context=True)
        )

        action = menu.addAction("Publish...")
        action.triggered.connect(
            lambda: host_tools.show_publisher(parent=parent,
                                              tab="publish")
        )

        action = menu.addAction("Manage...")
        action.triggered.connect(
            lambda: host_tools.show_scene_inventory(parent=parent)
        )

        action = menu.addAction("Library...")
        action.triggered.connect(
            lambda: host_tools.show_library_loader(parent=parent)
        )

        menu.addSeparator()
        action = menu.addAction("Work Files...")
        action.triggered.connect(
            lambda: host_tools.show_workfiles(parent=parent)
        )

        substance_painter.ui.add_menu(menu)

        def on_menu_destroyed():
            self.menu = None

        menu.destroyed.connect(on_menu_destroyed)

        self.menu = menu

    def _uninstall_menu(self):
        if self.menu:
            self.menu.destroy()
            self.menu = None

    def _register_callbacks(self):
        # Prepare emit event callbacks
        open_callback = partial(emit_event, "open")

        # Connect to the Substance Painter events
        dispatcher = substance_painter.event.DISPATCHER
        for event, callback in [
            (substance_painter.event.ProjectOpened, open_callback)
        ]:
            dispatcher.connect(event, callback)
            # Keep a reference so we can deregister if needed
            self.callbacks.append((event, callback))

    def _deregister_callbacks(self):
        for event, callback in self.callbacks:
            substance_painter.event.DISPATCHER.disconnect(event, callback)
        self.callbacks.clear()

    def _install_shelves(self, project_settings):
        """Load shelves configured in project settings.

        Shelf paths may contain anatomy template tokens (e.g. ``{asset}``)
        which are formatted against the current context.
        """
        shelves = project_settings["substancepainter"].get("shelves", {})
        if not shelves:
            return

        # Prepare formatting data if we detect any path which might have
        # template tokens like {asset} in there.
        formatting_data = {}
        has_formatting_entries = any("{" in path for path in shelves.values())
        if has_formatting_entries:
            project_name = self.get_current_project_name()
            asset_name = self.get_current_asset_name()
            # Fix: this used `get_current_asset_name()` which produced an
            # asset name where a task name is expected in the template data.
            task_name = self.get_current_task_name()
            system_settings = get_system_settings()
            formatting_data = get_template_data_with_names(project_name,
                                                           asset_name,
                                                           task_name,
                                                           system_settings)
            anatomy = Anatomy(project_name)
            formatting_data["root"] = anatomy.roots

        for name, path in shelves.items():
            shelf_name = None

            # Allow formatting with anatomy for the paths
            if "{" in path:
                path = StringTemplate.format_template(path, formatting_data)

            try:
                shelf_name = lib.load_shelf(path, name=name)
            except ValueError as exc:
                print(f"Failed to load shelf -> {exc}")

            if shelf_name:
                self.shelves.append(shelf_name)

    def _uninstall_shelves(self):
        for shelf_name in self.shelves:
            substance_painter.resource.Shelves.remove(shelf_name)
        self.shelves.clear()
|
||||
|
||||
|
||||
def on_open():
    """Event callback ran after a project is opened.

    Checks the opened project for outdated loaded containers and, when the
    Substance main window is available, shows a pop-up that lets the user
    open the Scene Inventory to update them.
    """
    log.info("Running callback on open..")

    if any_outdated_containers():
        # Imported lazily to avoid a Qt dependency at module import time
        from openpype.widgets import popup

        log.warning("Scene has outdated content.")

        # Get main window to parent the pop-up to
        parent = substance_painter.ui.get_main_window()
        if parent is None:
            log.info("Skipping outdated content pop-up "
                     "because Substance window can't be found.")
        else:

            # Show outdated pop-up
            def _on_show_inventory():
                from openpype.tools.utils import host_tools
                host_tools.show_scene_inventory(parent=parent)

            dialog = popup.Popup(parent=parent)
            dialog.setWindowTitle("Substance scene has outdated content")
            dialog.setMessage("There are outdated containers in "
                              "your Substance scene.")
            dialog.on_clicked.connect(_on_show_inventory)
            dialog.show()
|
||||
|
||||
|
||||
def imprint_container(container, name, namespace, context, loader):
    """Imprint a loaded container with metadata.

    Containerisation enables a tracking of version, author and origin
    for loaded assets.

    Arguments:
        container (dict): The (substance metadata) dictionary to imprint into.
        name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
        context (dict): Asset information
        loader (load.LoaderPlugin): loader instance used to produce container.

    Returns:
        None

    """
    container.update({
        "schema": "openpype:container-2.0",
        "id": AVALON_CONTAINER_ID,
        "name": str(name),
        "namespace": str(namespace) if namespace else None,
        "loader": str(loader.__class__.__name__),
        "representation": str(context["representation"]["_id"]),
    })
|
||||
|
||||
|
||||
def set_container_metadata(object_name, container_data, update=False):
    """Helper method to directly set the data for a specific container.

    Args:
        object_name (str): The unique object name identifier for the container
        container_data (dict): The data for the container.
            Note 'objectName' data is derived from `object_name` and key in
            `container_data` will be ignored.
        update (bool): Whether to only update the dict data.

    """
    # The objectName is derived from the key in the metadata so won't be stored
    # in the metadata in the container's data. Note: this intentionally pops
    # the key from the caller's dict in-place.
    container_data.pop("objectName", None)

    metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
    containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY) or {}
    if update:
        existing_data = containers.setdefault(object_name, {})
        existing_data.update(container_data)  # mutable dict, in-place update
    else:
        containers[object_name] = container_data
    # Use the shared constant so read and write always use the same child key
    metadata.set(OPENPYPE_METADATA_CONTAINERS_KEY, containers)
|
||||
|
||||
|
||||
def remove_container_metadata(object_name):
    """Helper method to remove the data for a specific container.

    Args:
        object_name (str): The unique object name identifier for the container

    """
    metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
    containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY)
    if containers:
        containers.pop(object_name, None)
        # Use the shared constant so read and write use the same child key
        metadata.set(OPENPYPE_METADATA_CONTAINERS_KEY, containers)
|
||||
|
||||
|
||||
def set_instance(instance_id, instance_data, update=False):
    """Helper method to directly set the data for a specific instance.

    Args:
        instance_id (str): Unique identifier for the instance
        instance_data (dict): The instance data to store in the metadata.
        update (bool): Whether to only update the existing stored data
            with the new data instead of replacing it entirely.
    """
    # Delegate to the batch call which writes the metadata once
    set_instances({instance_id: instance_data}, update=update)
|
||||
|
||||
|
||||
def set_instances(instance_data_by_id, update=False):
    """Store data for multiple instances at the same time.

    This is more optimal than querying and setting them in the metadata one
    by one.

    Args:
        instance_data_by_id (dict): Instance data by instance id.
        update (bool): Whether to only update the existing stored data
            with the new data instead of replacing it entirely.
    """
    metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
    instances = metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {}

    for instance_id, instance_data in instance_data_by_id.items():
        if update:
            # Fix: use `setdefault` (like `set_container_metadata` does) so
            # updates for ids not yet stored are kept; `.get(id, {})` created
            # a detached dict and silently dropped the data.
            existing_data = instances.setdefault(instance_id, {})
            existing_data.update(instance_data)
        else:
            instances[instance_id] = instance_data

    # Use the shared constant so read and write use the same child key
    metadata.set(OPENPYPE_METADATA_INSTANCES_KEY, instances)
|
||||
|
||||
|
||||
def remove_instance(instance_id):
    """Helper method to remove the data for a specific instance.

    Args:
        instance_id (str): Unique identifier for the instance

    """
    metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
    instances = metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {}
    instances.pop(instance_id, None)
    # Use the shared constant so read and write use the same child key
    metadata.set(OPENPYPE_METADATA_INSTANCES_KEY, instances)
|
||||
|
||||
|
||||
def get_instances_by_id():
    """Return all instances stored in the project instances metadata.

    Returns:
        dict: Instance data by instance id; empty dict when no project
            is open or no instances were stored.
    """
    if not substance_painter.project.is_open():
        return {}

    metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY)
    return metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {}
|
||||
|
||||
|
||||
def get_instances():
    """Return all instances stored in the project instances as a list.

    Returns:
        list: The stored instance data dicts (ids are dropped).
    """
    return list(get_instances_by_id().values())
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
|
||||
|
||||
def cleanup_openpype_qt_widgets():
    """Close all top-level OpenPype Qt widgets via the Substance UI API.

    Workaround for Substance failing to shut down correctly
    when a Qt window was still open at the time of shutting down.

    This seems to work sometimes, but not all the time.

    """
    # TODO: Create a more reliable method to close down all OpenPype Qt widgets
    from PySide2 import QtWidgets
    import substance_painter.ui

    # Kill OpenPype Qt widgets; identified by the module that defined the
    # widget class rather than by window title
    print("Killing OpenPype Qt widgets..")
    for widget in QtWidgets.QApplication.topLevelWidgets():
        if widget.__module__.startswith("openpype."):
            print(f"Deleting widget: {widget.__class__.__name__}")
            substance_painter.ui.delete_ui_element(widget)
|
||||
|
||||
|
||||
def start_plugin():
    """Plug-in entry point: install the OpenPype Substance Painter host."""
    # Imported lazily so the module can load without OpenPype on sys.path
    from openpype.pipeline import install_host
    from openpype.hosts.substancepainter.api import SubstanceHost
    install_host(SubstanceHost())
|
||||
|
||||
|
||||
def close_plugin():
    """Plug-in exit point: tear down Qt widgets and uninstall the host."""
    from openpype.pipeline import uninstall_host
    # Close OpenPype windows first to avoid Substance shutdown hangs
    cleanup_openpype_qt_widgets()
    uninstall_host()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the plug-in module directly (e.g. from a script console)
    start_plugin()
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
"""Ease the OpenPype on-boarding process by loading the plug-in on first run"""
|
||||
|
||||
OPENPYPE_PLUGIN_NAME = "openpype_plugin"


def start_plugin():
    """Enable the OpenPype plug-in automatically on the very first launch.

    Uses the (unofficial) ``painter_plugins_ui`` internals to either flag
    the plug-in as launch-at-start, or launch it immediately, depending on
    whether that UI plug-in already initialized. All failures are printed
    and swallowed so Substance startup is never broken by this helper.
    """
    try:
        # This isn't exposed in the official API so we keep it in a try-except
        from painter_plugins_ui import (
            get_settings,
            LAUNCH_AT_START_KEY,
            ON_STATE,
            PLUGINS_MENU,
            plugin_manager
        )

        # The `painter_plugins_ui` plug-in itself is also a startup plug-in
        # we need to take into account that it could run either earlier or
        # later than this startup script, we check whether its menu initialized
        is_before_plugins_menu = PLUGINS_MENU is None

        settings = get_settings(OPENPYPE_PLUGIN_NAME)
        if settings.value(LAUNCH_AT_START_KEY, None) is None:
            print("Initializing OpenPype plug-in on first run...")
            if is_before_plugins_menu:
                print("- running before 'painter_plugins_ui'")
                # Delay the launch to the painter_plugins_ui initialization
                settings.setValue(LAUNCH_AT_START_KEY, ON_STATE)
            else:
                # Launch now
                print("- running after 'painter_plugins_ui'")
                plugin_manager(OPENPYPE_PLUGIN_NAME)(True)

                # Set the checked state in the menu to avoid confusion.
                # Fix: pass a default of None so a missing menu entry does
                # not raise StopIteration - the original bare `next(...)`
                # could never reach the `is not None` guard below.
                action = next(
                    (action for action in PLUGINS_MENU._menu.actions()
                     if action.text() == OPENPYPE_PLUGIN_NAME),
                    None
                )
                if action is not None:
                    action.blockSignals(True)
                    action.setChecked(True)
                    action.blockSignals(False)

    except Exception as exc:
        # Deliberate best-effort: log and continue on any failure
        print(exc)
|
||||
|
|
@ -0,0 +1,162 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Creator plugin for creating textures."""
|
||||
|
||||
from openpype.pipeline import CreatedInstance, Creator, CreatorError
|
||||
from openpype.lib import (
|
||||
EnumDef,
|
||||
UILabelDef,
|
||||
NumberDef,
|
||||
BoolDef
|
||||
)
|
||||
|
||||
from openpype.hosts.substancepainter.api.pipeline import (
|
||||
get_instances,
|
||||
set_instance,
|
||||
set_instances,
|
||||
remove_instance
|
||||
)
|
||||
from openpype.hosts.substancepainter.api.lib import get_export_presets
|
||||
|
||||
import substance_painter.project
|
||||
|
||||
|
||||
class CreateTextures(Creator):
    """Create a texture set."""
    identifier = "io.openpype.creators.substancepainter.textureset"
    label = "Textures"
    family = "textureSet"
    icon = "picture-o"

    default_variant = "Main"

    def create(self, subset_name, instance_data, pre_create_data):
        """Create a new textureSet instance stored in project metadata.

        Raises:
            CreatorError: When no Substance project is open.
        """
        if not substance_painter.project.is_open():
            raise CreatorError("Can't create a Texture Set instance without "
                               "an open project.")

        instance = self.create_instance_in_context(subset_name,
                                                   instance_data)
        # Persist in the Substance project's OpenPype metadata
        set_instance(
            instance_id=instance["instance_id"],
            instance_data=instance.data_to_store()
        )

    def collect_instances(self):
        """Collect stored instances matching this creator or its family."""
        for instance in get_instances():
            if (instance.get("creator_identifier") == self.identifier or
                    instance.get("family") == self.family):
                self.create_instance_in_context_from_existing(instance)

    def update_instances(self, update_list):
        """Persist changed instance data back to the project metadata."""
        instance_data_by_id = {}
        for instance, _changes in update_list:
            # Persist the data
            instance_id = instance.get("instance_id")
            instance_data = instance.data_to_store()
            instance_data_by_id[instance_id] = instance_data
        set_instances(instance_data_by_id, update=True)

    def remove_instances(self, instances):
        """Remove instances from project metadata and the create context."""
        for instance in instances:
            remove_instance(instance["instance_id"])
            self._remove_instance_from_context(instance)

    # Helper methods (this might get moved into Creator class)
    def create_instance_in_context(self, subset_name, data):
        # Wrap the raw data in a CreatedInstance and register it
        instance = CreatedInstance(
            self.family, subset_name, data, self
        )
        self.create_context.creator_adds_instance(instance)
        return instance

    def create_instance_in_context_from_existing(self, data):
        # Re-create a CreatedInstance from previously stored data
        instance = CreatedInstance.from_existing(data, self)
        self.create_context.creator_adds_instance(instance)
        return instance

    def get_instance_attr_defs(self):
        """Return the export-related attribute definitions."""
        return [
            EnumDef("exportPresetUrl",
                    items=get_export_presets(),
                    label="Output Template"),
            BoolDef("allowSkippedMaps",
                    label="Allow Skipped Output Maps",
                    tooltip="When enabled this allows the publish to ignore "
                            "output maps in the used output template if one "
                            "or more maps are skipped due to the required "
                            "channels not being present in the current file.",
                    default=True),
            EnumDef("exportFileFormat",
                    items={
                        None: "Based on output template",
                        # TODO: Get available extensions from substance API
                        "bmp": "bmp",
                        "ico": "ico",
                        "jpeg": "jpeg",
                        "jng": "jng",
                        "pbm": "pbm",
                        "pgm": "pgm",
                        "png": "png",
                        "ppm": "ppm",
                        "tga": "targa",
                        "tif": "tiff",
                        "wap": "wap",
                        "wbmp": "wbmp",
                        "xpm": "xpm",
                        "gif": "gif",
                        "hdr": "hdr",
                        "exr": "exr",
                        "j2k": "j2k",
                        "jp2": "jp2",
                        "pfm": "pfm",
                        "webp": "webp",
                        # TODO: Unsure why jxr format fails to export
                        # "jxr": "jpeg-xr",
                        # TODO: File formats that combine the exported textures
                        #       like psd are not correctly supported due to
                        #       publishing only a single file
                        # "psd": "psd",
                        # "sbsar": "sbsar",
                    },
                    default=None,
                    label="File type"),
            EnumDef("exportSize",
                    items={
                        None: "Based on each Texture Set's size",
                        # The key is size of the texture file in log2.
                        # (i.e. 10 means 2^10 = 1024)
                        7: "128",
                        8: "256",
                        9: "512",
                        10: "1024",
                        11: "2048",
                        12: "4096"
                    },
                    default=None,
                    label="Size"),

            EnumDef("exportPadding",
                    items={
                        "passthrough": "No padding (passthrough)",
                        "infinite": "Dilation infinite",
                        "transparent": "Dilation + transparent",
                        "color": "Dilation + default background color",
                        "diffusion": "Dilation + diffusion"
                    },
                    default="infinite",
                    label="Padding"),
            NumberDef("exportDilationDistance",
                      minimum=0,
                      maximum=256,
                      decimals=0,
                      default=16,
                      label="Dilation Distance"),
            UILabelDef("*only used with "
                       "'Dilation + <x>' padding"),
        ]

    def get_pre_create_attr_defs(self):
        # Use same attributes as for instance attributes
        return self.get_instance_attr_defs()
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Creator plugin for creating workfiles."""
|
||||
|
||||
from openpype.pipeline import CreatedInstance, AutoCreator
|
||||
from openpype.client import get_asset_by_name
|
||||
|
||||
from openpype.hosts.substancepainter.api.pipeline import (
|
||||
set_instances,
|
||||
set_instance,
|
||||
get_instances
|
||||
)
|
||||
|
||||
import substance_painter.project
|
||||
|
||||
|
||||
class CreateWorkfile(AutoCreator):
    """Workfile auto-creator."""
    identifier = "io.openpype.creators.substancepainter.workfile"
    label = "Workfile"
    family = "workfile"
    icon = "document"

    default_variant = "Main"

    def create(self):
        """Ensure exactly one workfile instance exists for current context.

        Creates the instance when missing, or re-targets it when the
        current asset/task differs from what it was created for, then
        persists it to the Substance project metadata.
        """
        if not substance_painter.project.is_open():
            return

        variant = self.default_variant
        project_name = self.project_name
        asset_name = self.create_context.get_current_asset_name()
        task_name = self.create_context.get_current_task_name()
        host_name = self.create_context.host_name

        # Workfile instance should always exist and must only exist once.
        # As such we'll first check if it already exists and is collected.
        current_instance = next(
            (
                instance for instance in self.create_context.instances
                if instance.creator_identifier == self.identifier
            ), None)

        if current_instance is None:
            self.log.info("Auto-creating workfile instance...")
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            data = {
                "asset": asset_name,
                "task": task_name,
                "variant": variant
            }
            current_instance = self.create_instance_in_context(subset_name,
                                                               data)
        elif (
            current_instance["asset"] != asset_name
            or current_instance["task"] != task_name
        ):
            # Update instance context if is not the same
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            current_instance["asset"] = asset_name
            current_instance["task"] = task_name
            current_instance["subset"] = subset_name

        # Persist in the Substance project's OpenPype metadata
        set_instance(
            instance_id=current_instance.get("instance_id"),
            instance_data=current_instance.data_to_store()
        )

    def collect_instances(self):
        """Collect stored instances matching this creator or its family."""
        for instance in get_instances():
            if (instance.get("creator_identifier") == self.identifier or
                    instance.get("family") == self.family):
                self.create_instance_in_context_from_existing(instance)

    def update_instances(self, update_list):
        """Persist changed instance data back to the project metadata."""
        instance_data_by_id = {}
        for instance, _changes in update_list:
            # Persist the data
            instance_id = instance.get("instance_id")
            instance_data = instance.data_to_store()
            instance_data_by_id[instance_id] = instance_data
        set_instances(instance_data_by_id, update=True)

    # Helper methods (this might get moved into Creator class)
    def create_instance_in_context(self, subset_name, data):
        # Wrap the raw data in a CreatedInstance and register it
        instance = CreatedInstance(
            self.family, subset_name, data, self
        )
        self.create_context.creator_adds_instance(instance)
        return instance

    def create_instance_in_context_from_existing(self, data):
        # Re-create a CreatedInstance from previously stored data
        instance = CreatedInstance.from_existing(data, self)
        self.create_context.creator_adds_instance(instance)
        return instance
|
||||
124
openpype/hosts/substancepainter/plugins/load/load_mesh.py
Normal file
124
openpype/hosts/substancepainter/plugins/load/load_mesh.py
Normal file
|
|
@ -0,0 +1,124 @@
|
|||
from openpype.pipeline import (
|
||||
load,
|
||||
get_representation_path,
|
||||
)
|
||||
from openpype.pipeline.load import LoadError
|
||||
from openpype.hosts.substancepainter.api.pipeline import (
|
||||
imprint_container,
|
||||
set_container_metadata,
|
||||
remove_container_metadata
|
||||
)
|
||||
from openpype.hosts.substancepainter.api.lib import prompt_new_file_with_mesh
|
||||
|
||||
import substance_painter.project
|
||||
import qargparse
|
||||
|
||||
|
||||
class SubstanceLoadProjectMesh(load.LoaderPlugin):
    """Load mesh for project"""

    families = ["*"]
    representations = ["abc", "fbx", "obj", "gltf"]

    label = "Load mesh"
    order = -10
    icon = "code-fork"
    color = "orange"

    options = [
        qargparse.Boolean(
            "preserve_strokes",
            default=True,
            help="Preserve strokes positions on mesh.\n"
                 "(only relevant when loading into existing project)"
        ),
        qargparse.Boolean(
            "import_cameras",
            default=True,
            help="Import cameras from the mesh file."
        )
    ]

    def load(self, context, name, namespace, data):
        """Load the mesh into the current project (or start a new one).

        When no project is open the user is prompted with the 'new
        project' dialog locked to this mesh; otherwise the project mesh
        is reloaded in place.
        """
        # Get user inputs
        import_cameras = data.get("import_cameras", True)
        preserve_strokes = data.get("preserve_strokes", True)

        if not substance_painter.project.is_open():
            # Allow to 'initialize' a new project
            result = prompt_new_file_with_mesh(mesh_filepath=self.fname)
            if not result:
                self.log.info("User cancelled new project prompt.")
                return

        else:
            # Reload the mesh
            settings = substance_painter.project.MeshReloadingSettings(
                import_cameras=import_cameras,
                preserve_strokes=preserve_strokes
            )

            # Callback invoked by Substance once the async reload finished
            def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus):  # noqa
                if status == substance_painter.project.ReloadMeshStatus.SUCCESS:  # noqa
                    self.log.info("Reload succeeded")
                else:
                    raise LoadError("Reload of mesh failed")

            path = self.fname
            substance_painter.project.reload_mesh(path,
                                                  settings,
                                                  on_mesh_reload)

        # Store container
        container = {}
        # There is only ever one project mesh, so use a fixed object name
        project_mesh_object_name = "_ProjectMesh_"
        imprint_container(container,
                          name=project_mesh_object_name,
                          namespace=project_mesh_object_name,
                          context=context,
                          loader=self)

        # We want store some options for updating to keep consistent behavior
        # from the user's original choice. We don't store 'preserve_strokes'
        # as we always preserve strokes on updates.
        container["options"] = {
            "import_cameras": import_cameras,
        }

        set_container_metadata(project_mesh_object_name, container)

    def switch(self, container, representation):
        # Switching is identical to updating for this loader
        self.update(container, representation)

    def update(self, container, representation):
        """Reload the project mesh from the new representation."""
        path = get_representation_path(representation)

        # Reload the mesh using the options stored at load time
        container_options = container.get("options", {})
        settings = substance_painter.project.MeshReloadingSettings(
            import_cameras=container_options.get("import_cameras", True),
            preserve_strokes=True
        )

        # Callback invoked by Substance once the async reload finished
        def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus):
            if status == substance_painter.project.ReloadMeshStatus.SUCCESS:
                self.log.info("Reload succeeded")
            else:
                raise LoadError("Reload of mesh failed")

        substance_painter.project.reload_mesh(path, settings, on_mesh_reload)

        # Update container representation
        object_name = container["objectName"]
        update_data = {"representation": str(representation["_id"])}
        set_container_metadata(object_name, update_data, update=True)

    def remove(self, container):
        """Remove the container metadata only; the mesh stays in place.

        # Remove OpenPype related settings about what model was loaded
        # or close the project?
        """
        # TODO: This is likely best 'hidden' away to the user because
        #       this will leave the project's mesh unmanaged.
        remove_container_metadata(container["objectName"])
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import registered_host
|
||||
|
||||
|
||||
class CollectCurrentFile(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file into context"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.49
|
||||
label = "Current Workfile"
|
||||
hosts = ["substancepainter"]
|
||||
|
||||
def process(self, context):
|
||||
host = registered_host()
|
||||
path = host.get_current_workfile()
|
||||
context.data["currentFile"] = path
|
||||
self.log.debug(f"Current workfile: {path}")
|
||||
|
|
@ -0,0 +1,196 @@
|
|||
import os
|
||||
import copy
|
||||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import publish
|
||||
|
||||
import substance_painter.textureset
|
||||
from openpype.hosts.substancepainter.api.lib import (
|
||||
get_parsed_export_maps,
|
||||
strip_template
|
||||
)
|
||||
from openpype.pipeline.create import get_subset_name
|
||||
from openpype.client import get_asset_by_name
|
||||
|
||||
|
||||
class CollectTextureSet(pyblish.api.InstancePlugin):
|
||||
"""Extract Textures using an output template config"""
|
||||
# TODO: Production-test usage of color spaces
|
||||
# TODO: Detect what source data channels end up in each file
|
||||
|
||||
label = "Collect Texture Set images"
|
||||
hosts = ["substancepainter"]
|
||||
families = ["textureSet"]
|
||||
order = pyblish.api.CollectorOrder
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
config = self.get_export_config(instance)
|
||||
asset_doc = get_asset_by_name(
|
||||
project_name=instance.context.data["projectName"],
|
||||
asset_name=instance.data["asset"]
|
||||
)
|
||||
|
||||
instance.data["exportConfig"] = config
|
||||
maps = get_parsed_export_maps(config)
|
||||
|
||||
# Let's break the instance into multiple instances to integrate
|
||||
# a subset per generated texture or texture UDIM sequence
|
||||
for (texture_set_name, stack_name), template_maps in maps.items():
|
||||
self.log.info(f"Processing {texture_set_name}/{stack_name}")
|
||||
for template, outputs in template_maps.items():
|
||||
self.log.info(f"Processing {template}")
|
||||
self.create_image_instance(instance, template, outputs,
|
||||
asset_doc=asset_doc,
|
||||
texture_set_name=texture_set_name,
|
||||
stack_name=stack_name)
|
||||
|
||||
def create_image_instance(self, instance, template, outputs,
|
||||
asset_doc, texture_set_name, stack_name):
|
||||
"""Create a new instance per image or UDIM sequence.
|
||||
|
||||
The new instances will be of family `image`.
|
||||
|
||||
"""
|
||||
|
||||
context = instance.context
|
||||
first_filepath = outputs[0]["filepath"]
|
||||
fnames = [os.path.basename(output["filepath"]) for output in outputs]
|
||||
ext = os.path.splitext(first_filepath)[1]
|
||||
assert ext.lstrip("."), f"No extension: {ext}"
|
||||
|
||||
always_include_texture_set_name = False # todo: make this configurable
|
||||
all_texture_sets = substance_painter.textureset.all_texture_sets()
|
||||
texture_set = substance_painter.textureset.TextureSet.from_name(
|
||||
texture_set_name
|
||||
)
|
||||
|
||||
# Define the suffix we want to give this particular texture
|
||||
# set and set up a remapped subset naming for it.
|
||||
suffix = ""
|
||||
if always_include_texture_set_name or len(all_texture_sets) > 1:
|
||||
# More than one texture set, include texture set name
|
||||
suffix += f".{texture_set_name}"
|
||||
if texture_set.is_layered_material() and stack_name:
|
||||
# More than one stack, include stack name
|
||||
suffix += f".{stack_name}"
|
||||
|
||||
# Always include the map identifier
|
||||
map_identifier = strip_template(template)
|
||||
suffix += f".{map_identifier}"
|
||||
|
||||
image_subset = get_subset_name(
|
||||
# TODO: The family actually isn't 'texture' currently but for now
|
||||
# this is only done so the subset name starts with 'texture'
|
||||
family="texture",
|
||||
variant=instance.data["variant"] + suffix,
|
||||
task_name=instance.data.get("task"),
|
||||
asset_doc=asset_doc,
|
||||
project_name=context.data["projectName"],
|
||||
host_name=context.data["hostName"],
|
||||
project_settings=context.data["project_settings"]
|
||||
)
|
||||
|
||||
# Prepare representation
|
||||
representation = {
|
||||
"name": ext.lstrip("."),
|
||||
"ext": ext.lstrip("."),
|
||||
"files": fnames if len(fnames) > 1 else fnames[0],
|
||||
}
|
||||
|
||||
# Mark as UDIM explicitly if it has UDIM tiles.
|
||||
if bool(outputs[0].get("udim")):
|
||||
# The representation for a UDIM sequence should have a `udim` key
|
||||
# that is a list of all udim tiles (str) like: ["1001", "1002"]
|
||||
# strings. See CollectTextures plug-in and Integrators.
|
||||
representation["udim"] = [output["udim"] for output in outputs]
|
||||
|
||||
# Set up the representation for thumbnail generation
|
||||
# TODO: Simplify this once thumbnail extraction is refactored
|
||||
staging_dir = os.path.dirname(first_filepath)
|
||||
representation["tags"] = ["review"]
|
||||
representation["stagingDir"] = staging_dir
|
||||
|
||||
# Clone the instance
|
||||
image_instance = context.create_instance(image_subset)
|
||||
image_instance[:] = instance[:]
|
||||
image_instance.data.update(copy.deepcopy(instance.data))
|
||||
image_instance.data["name"] = image_subset
|
||||
image_instance.data["label"] = image_subset
|
||||
image_instance.data["subset"] = image_subset
|
||||
image_instance.data["family"] = "image"
|
||||
image_instance.data["families"] = ["image", "textures"]
|
||||
image_instance.data["representations"] = [representation]
|
||||
|
||||
# Group the textures together in the loader
|
||||
image_instance.data["subsetGroup"] = instance.data["subset"]
|
||||
|
||||
# Store the texture set name and stack name on the instance
|
||||
image_instance.data["textureSetName"] = texture_set_name
|
||||
image_instance.data["textureStackName"] = stack_name
|
||||
|
||||
# Store color space with the instance
|
||||
# Note: The extractor will assign it to the representation
|
||||
colorspace = outputs[0].get("colorSpace")
|
||||
if colorspace:
|
||||
self.log.debug(f"{image_subset} colorspace: {colorspace}")
|
||||
image_instance.data["colorspace"] = colorspace
|
||||
|
||||
# Store the instance in the original instance as a member
|
||||
instance.append(image_instance)
|
||||
|
||||
def get_export_config(self, instance):
|
||||
"""Return an export configuration dict for texture exports.
|
||||
|
||||
This config can be supplied to:
|
||||
- `substance_painter.export.export_project_textures`
|
||||
- `substance_painter.export.list_project_textures`
|
||||
|
||||
See documentation on substance_painter.export module about the
|
||||
formatting of the configuration dictionary.
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): Texture Set instance to be
|
||||
published.
|
||||
|
||||
Returns:
|
||||
dict: Export config
|
||||
|
||||
"""
|
||||
|
||||
creator_attrs = instance.data["creator_attributes"]
|
||||
preset_url = creator_attrs["exportPresetUrl"]
|
||||
self.log.debug(f"Exporting using preset: {preset_url}")
|
||||
|
||||
# See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa
|
||||
config = { # noqa
|
||||
"exportShaderParams": True,
|
||||
"exportPath": publish.get_instance_staging_dir(instance),
|
||||
"defaultExportPreset": preset_url,
|
||||
|
||||
# Custom overrides to the exporter
|
||||
"exportParameters": [
|
||||
{
|
||||
"parameters": {
|
||||
"fileFormat": creator_attrs["exportFileFormat"],
|
||||
"sizeLog2": creator_attrs["exportSize"],
|
||||
"paddingAlgorithm": creator_attrs["exportPadding"],
|
||||
"dilationDistance": creator_attrs["exportDilationDistance"] # noqa
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
# Create the list of Texture Sets to export.
|
||||
config["exportList"] = []
|
||||
for texture_set in substance_painter.textureset.all_texture_sets():
|
||||
config["exportList"].append({"rootPath": texture_set.name()})
|
||||
|
||||
# Consider None values from the creator attributes optionals
|
||||
for override in config["exportParameters"]:
|
||||
parameters = override.get("parameters")
|
||||
for key, value in dict(parameters).items():
|
||||
if value is None:
|
||||
parameters.pop(key)
|
||||
|
||||
return config
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectWorkfileRepresentation(pyblish.api.InstancePlugin):
|
||||
"""Create a publish representation for the current workfile instance."""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
label = "Workfile representation"
|
||||
hosts = ["substancepainter"]
|
||||
families = ["workfile"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
context = instance.context
|
||||
current_file = context.data["currentFile"]
|
||||
|
||||
folder, file = os.path.split(current_file)
|
||||
filename, ext = os.path.splitext(file)
|
||||
|
||||
instance.data["representations"] = [{
|
||||
"name": ext.lstrip("."),
|
||||
"ext": ext.lstrip("."),
|
||||
"files": file,
|
||||
"stagingDir": folder,
|
||||
}]
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
import substance_painter.export
|
||||
|
||||
from openpype.pipeline import KnownPublishError, publish
|
||||
|
||||
|
||||
class ExtractTextures(publish.Extractor,
|
||||
publish.ColormanagedPyblishPluginMixin):
|
||||
"""Extract Textures using an output template config.
|
||||
|
||||
Note:
|
||||
This Extractor assumes that `collect_textureset_images` has prepared
|
||||
the relevant export config and has also collected the individual image
|
||||
instances for publishing including its representation. That is why this
|
||||
particular Extractor doesn't specify representations to integrate.
|
||||
|
||||
"""
|
||||
|
||||
label = "Extract Texture Set"
|
||||
hosts = ["substancepainter"]
|
||||
families = ["textureSet"]
|
||||
|
||||
# Run before thumbnail extractors
|
||||
order = publish.Extractor.order - 0.1
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
config = instance.data["exportConfig"]
|
||||
result = substance_painter.export.export_project_textures(config)
|
||||
|
||||
if result.status != substance_painter.export.ExportStatus.Success:
|
||||
raise KnownPublishError(
|
||||
"Failed to export texture set: {}".format(result.message)
|
||||
)
|
||||
|
||||
# Log what files we generated
|
||||
for (texture_set_name, stack_name), maps in result.textures.items():
|
||||
# Log our texture outputs
|
||||
self.log.info(f"Exported stack: {texture_set_name} {stack_name}")
|
||||
for texture_map in maps:
|
||||
self.log.info(f"Exported texture: {texture_map}")
|
||||
|
||||
# We'll insert the color space data for each image instance that we
|
||||
# added into this texture set. The collector couldn't do so because
|
||||
# some anatomy and other instance data needs to be collected prior
|
||||
context = instance.context
|
||||
for image_instance in instance:
|
||||
representation = next(iter(image_instance.data["representations"]))
|
||||
|
||||
colorspace = image_instance.data.get("colorspace")
|
||||
if not colorspace:
|
||||
self.log.debug("No color space data present for instance: "
|
||||
f"{image_instance}")
|
||||
continue
|
||||
|
||||
self.set_representation_colorspace(representation,
|
||||
context=context,
|
||||
colorspace=colorspace)
|
||||
|
||||
# The TextureSet instance should not be integrated. It generates no
|
||||
# output data. Instead the separated texture instances are generated
|
||||
# from it which themselves integrate into the database.
|
||||
instance.data["integrate"] = False
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import pyblish.api
|
||||
|
||||
from openpype.lib import version_up
|
||||
from openpype.pipeline import registered_host
|
||||
|
||||
|
||||
class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
|
||||
"""Increment current workfile version."""
|
||||
|
||||
order = pyblish.api.IntegratorOrder + 1
|
||||
label = "Increment Workfile Version"
|
||||
optional = True
|
||||
hosts = ["substancepainter"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
assert all(result["success"] for result in context.data["results"]), (
|
||||
"Publishing not successful so version is not increased.")
|
||||
|
||||
host = registered_host()
|
||||
path = context.data["currentFile"]
|
||||
self.log.info(f"Incrementing current workfile to: {path}")
|
||||
host.save_workfile(version_up(path))
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import (
|
||||
registered_host,
|
||||
KnownPublishError
|
||||
)
|
||||
|
||||
|
||||
class SaveCurrentWorkfile(pyblish.api.ContextPlugin):
|
||||
"""Save current workfile"""
|
||||
|
||||
label = "Save current workfile"
|
||||
order = pyblish.api.ExtractorOrder - 0.49
|
||||
hosts = ["substancepainter"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
host = registered_host()
|
||||
if context.data["currentFile"] != host.get_current_workfile():
|
||||
raise KnownPublishError("Workfile has changed during publishing!")
|
||||
|
||||
if host.has_unsaved_changes():
|
||||
self.log.info("Saving current file..")
|
||||
host.save_workfile()
|
||||
else:
|
||||
self.log.debug("Skipping workfile save because there are no "
|
||||
"unsaved changes.")
|
||||
|
|
@ -0,0 +1,109 @@
|
|||
import copy
|
||||
import os
|
||||
|
||||
import pyblish.api
|
||||
|
||||
import substance_painter.export
|
||||
|
||||
from openpype.pipeline import PublishValidationError
|
||||
|
||||
|
||||
class ValidateOutputMaps(pyblish.api.InstancePlugin):
|
||||
"""Validate all output maps for Output Template are generated.
|
||||
|
||||
Output maps will be skipped by Substance Painter if it is an output
|
||||
map in the Substance Output Template which uses channels that the current
|
||||
substance painter project has not painted or generated.
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate output maps"
|
||||
hosts = ["substancepainter"]
|
||||
families = ["textureSet"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
config = instance.data["exportConfig"]
|
||||
|
||||
# Substance Painter API does not allow to query the actual output maps
|
||||
# it will generate without actually exporting the files. So we try to
|
||||
# generate the smallest size / fastest export as possible
|
||||
config = copy.deepcopy(config)
|
||||
parameters = config["exportParameters"][0]["parameters"]
|
||||
parameters["sizeLog2"] = [1, 1] # output 2x2 images (smallest)
|
||||
parameters["paddingAlgorithm"] = "passthrough" # no dilation (faster)
|
||||
parameters["dithering"] = False # no dithering (faster)
|
||||
|
||||
result = substance_painter.export.export_project_textures(config)
|
||||
if result.status != substance_painter.export.ExportStatus.Success:
|
||||
raise PublishValidationError(
|
||||
"Failed to export texture set: {}".format(result.message)
|
||||
)
|
||||
|
||||
generated_files = set()
|
||||
for texture_maps in result.textures.values():
|
||||
for texture_map in texture_maps:
|
||||
generated_files.add(os.path.normpath(texture_map))
|
||||
# Directly clean up our temporary export
|
||||
os.remove(texture_map)
|
||||
|
||||
creator_attributes = instance.data.get("creator_attributes", {})
|
||||
allow_skipped_maps = creator_attributes.get("allowSkippedMaps", True)
|
||||
error_report_missing = []
|
||||
for image_instance in instance:
|
||||
|
||||
# Confirm whether the instance has its expected files generated.
|
||||
# We assume there's just one representation and that it is
|
||||
# the actual texture representation from the collector.
|
||||
representation = next(iter(image_instance.data["representations"]))
|
||||
staging_dir = representation["stagingDir"]
|
||||
filenames = representation["files"]
|
||||
if not isinstance(filenames, (list, tuple)):
|
||||
# Convert single file to list
|
||||
filenames = [filenames]
|
||||
|
||||
missing = []
|
||||
for filename in filenames:
|
||||
filepath = os.path.join(staging_dir, filename)
|
||||
filepath = os.path.normpath(filepath)
|
||||
if filepath not in generated_files:
|
||||
self.log.warning(f"Missing texture: {filepath}")
|
||||
missing.append(filepath)
|
||||
|
||||
if not missing:
|
||||
continue
|
||||
|
||||
if allow_skipped_maps:
|
||||
# TODO: This is changing state on the instance's which
|
||||
# should not be done during validation.
|
||||
self.log.warning(f"Disabling texture instance: "
|
||||
f"{image_instance}")
|
||||
image_instance.data["active"] = False
|
||||
image_instance.data["integrate"] = False
|
||||
representation.setdefault("tags", []).append("delete")
|
||||
continue
|
||||
else:
|
||||
error_report_missing.append((image_instance, missing))
|
||||
|
||||
if error_report_missing:
|
||||
|
||||
message = (
|
||||
"The Texture Set skipped exporting some output maps which are "
|
||||
"defined in the Output Template. This happens if the Output "
|
||||
"Templates exports maps from channels which you do not "
|
||||
"have in your current Substance Painter project.\n\n"
|
||||
"To allow this enable the *Allow Skipped Output Maps* setting "
|
||||
"on the instance.\n\n"
|
||||
f"Instance {instance} skipped exporting output maps:\n"
|
||||
""
|
||||
)
|
||||
|
||||
for image_instance, missing in error_report_missing:
|
||||
missing_str = ", ".join(missing)
|
||||
message += f"- **{image_instance}** skipped: {missing_str}\n"
|
||||
|
||||
raise PublishValidationError(
|
||||
message=message,
|
||||
title="Missing output maps"
|
||||
)
|
||||
|
|
@ -438,7 +438,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"Finished copying %i files" % len(resource_files))
|
||||
|
||||
def _create_instances_for_aov(
|
||||
self, instance_data, exp_files, additional_data
|
||||
self, instance_data, exp_files, additional_data, do_not_add_review
|
||||
):
|
||||
"""Create instance for each AOV found.
|
||||
|
||||
|
|
@ -449,6 +449,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
instance_data (pyblish.plugin.Instance): skeleton data for instance
|
||||
(those needed) later by collector
|
||||
exp_files (list): list of expected files divided by aovs
|
||||
additional_data (dict):
|
||||
do_not_add_review (bool): explicitly skip review
|
||||
|
||||
Returns:
|
||||
list of instances
|
||||
|
|
@ -514,8 +516,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
|
||||
app = os.environ.get("AVALON_APP", "")
|
||||
|
||||
preview = False
|
||||
|
||||
if isinstance(col, list):
|
||||
render_file_name = os.path.basename(col[0])
|
||||
else:
|
||||
|
|
@ -532,6 +532,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
new_instance = deepcopy(instance_data)
|
||||
new_instance["subset"] = subset_name
|
||||
new_instance["subsetGroup"] = group_name
|
||||
|
||||
preview = preview and not do_not_add_review
|
||||
if preview:
|
||||
new_instance["review"] = True
|
||||
|
||||
|
|
@ -591,7 +593,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
self.log.debug("instances:{}".format(instances))
|
||||
return instances
|
||||
|
||||
def _get_representations(self, instance, exp_files):
|
||||
def _get_representations(self, instance, exp_files, do_not_add_review):
|
||||
"""Create representations for file sequences.
|
||||
|
||||
This will return representations of expected files if they are not
|
||||
|
|
@ -602,6 +604,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
instance (dict): instance data for which we are
|
||||
setting representations
|
||||
exp_files (list): list of expected files
|
||||
do_not_add_review (bool): explicitly skip review
|
||||
|
||||
Returns:
|
||||
list of representations
|
||||
|
|
@ -651,6 +654,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
if instance.get("slate"):
|
||||
frame_start -= 1
|
||||
|
||||
preview = preview and not do_not_add_review
|
||||
rep = {
|
||||
"name": ext,
|
||||
"ext": ext,
|
||||
|
|
@ -705,6 +709,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
preview = match_aov_pattern(
|
||||
host_name, self.aov_filter, remainder
|
||||
)
|
||||
preview = preview and not do_not_add_review
|
||||
if preview:
|
||||
rep.update({
|
||||
"fps": instance.get("fps"),
|
||||
|
|
@ -820,8 +825,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
families = [family]
|
||||
|
||||
# pass review to families if marked as review
|
||||
do_not_add_review = False
|
||||
if data.get("review"):
|
||||
families.append("review")
|
||||
elif data.get("review") == False:
|
||||
self.log.debug("Instance has review explicitly disabled.")
|
||||
do_not_add_review = True
|
||||
|
||||
instance_skeleton_data = {
|
||||
"family": family,
|
||||
|
|
@ -977,7 +986,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
instances = self._create_instances_for_aov(
|
||||
instance_skeleton_data,
|
||||
data.get("expectedFiles"),
|
||||
additional_data
|
||||
additional_data,
|
||||
do_not_add_review
|
||||
)
|
||||
self.log.info("got {} instance{}".format(
|
||||
len(instances),
|
||||
|
|
@ -986,7 +996,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
else:
|
||||
representations = self._get_representations(
|
||||
instance_skeleton_data,
|
||||
data.get("expectedFiles")
|
||||
data.get("expectedFiles"),
|
||||
do_not_add_review
|
||||
)
|
||||
|
||||
if "representations" not in instance_skeleton_data.keys():
|
||||
|
|
|
|||
|
|
@ -58,7 +58,7 @@ class RenderInstance(object):
|
|||
# With default values
|
||||
# metadata
|
||||
renderer = attr.ib(default="") # renderer - can be used in Deadline
|
||||
review = attr.ib(default=False) # generate review from instance (bool)
|
||||
review = attr.ib(default=None) # False - explicitly skip review
|
||||
priority = attr.ib(default=50) # job priority on farm
|
||||
|
||||
family = attr.ib(default="renderlayer")
|
||||
|
|
|
|||
|
|
@ -19,9 +19,9 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
order = pyblish.api.ExtractorOrder
|
||||
families = [
|
||||
"imagesequence", "render", "render2d", "prerender",
|
||||
"source", "clip", "take", "online"
|
||||
"source", "clip", "take", "online", "image"
|
||||
]
|
||||
hosts = ["shell", "fusion", "resolve", "traypublisher"]
|
||||
hosts = ["shell", "fusion", "resolve", "traypublisher", "substancepainter"]
|
||||
enabled = False
|
||||
|
||||
# presetable attribute
|
||||
|
|
|
|||
|
|
@ -163,6 +163,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
"Instance is marked to be processed on farm. Skipping")
|
||||
return
|
||||
|
||||
# Instance is marked to not get integrated
|
||||
if not instance.data.get("integrate", True):
|
||||
self.log.info("Instance is marked to skip integrating. Skipping")
|
||||
return
|
||||
|
||||
filtered_repres = self.filter_representations(instance)
|
||||
# Skip instance if there are not representations to integrate
|
||||
# all representations should not be integrated
|
||||
|
|
|
|||
BIN
openpype/resources/app_icons/substancepainter.png
Normal file
BIN
openpype/resources/app_icons/substancepainter.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 104 KiB |
|
|
@ -13,10 +13,14 @@
|
|||
"RenderCreator": {
|
||||
"defaults": [
|
||||
"Main"
|
||||
]
|
||||
],
|
||||
"mark_for_review": true
|
||||
}
|
||||
},
|
||||
"publish": {
|
||||
"CollectReview": {
|
||||
"enabled": true
|
||||
},
|
||||
"ValidateSceneSettings": {
|
||||
"enabled": true,
|
||||
"optional": true,
|
||||
|
|
|
|||
|
|
@ -1460,7 +1460,8 @@
|
|||
},
|
||||
"reference_loader": {
|
||||
"namespace": "{asset_name}_{subset}_##_",
|
||||
"group_name": "_GRP"
|
||||
"group_name": "_GRP",
|
||||
"display_handle": true
|
||||
}
|
||||
},
|
||||
"workfile_build": {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"imageio": {
|
||||
"ocio_config": {
|
||||
"enabled": true,
|
||||
"filepath": []
|
||||
},
|
||||
"file_rules": {
|
||||
"enabled": true,
|
||||
"rules": {}
|
||||
}
|
||||
},
|
||||
"shelves": {}
|
||||
}
|
||||
|
|
@ -1479,6 +1479,33 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"substancepainter": {
|
||||
"enabled": true,
|
||||
"label": "Substance Painter",
|
||||
"icon": "app_icons/substancepainter.png",
|
||||
"host_name": "substancepainter",
|
||||
"environment": {},
|
||||
"variants": {
|
||||
"8-2-0": {
|
||||
"executables": {
|
||||
"windows": [
|
||||
"C:\\Program Files\\Adobe\\Adobe Substance 3D Painter\\Adobe Substance 3D Painter.exe"
|
||||
],
|
||||
"darwin": [],
|
||||
"linux": []
|
||||
},
|
||||
"arguments": {
|
||||
"windows": [],
|
||||
"darwin": [],
|
||||
"linux": []
|
||||
},
|
||||
"environment": {}
|
||||
},
|
||||
"__dynamic_keys_labels__": {
|
||||
"8-2-0": "8.2.0"
|
||||
}
|
||||
}
|
||||
},
|
||||
"unreal": {
|
||||
"enabled": true,
|
||||
"label": "Unreal Editor",
|
||||
|
|
|
|||
|
|
@ -168,6 +168,7 @@ class HostsEnumEntity(BaseEnumEntity):
|
|||
"tvpaint",
|
||||
"unreal",
|
||||
"standalonepublisher",
|
||||
"substancepainter",
|
||||
"traypublisher",
|
||||
"webpublisher"
|
||||
]
|
||||
|
|
|
|||
|
|
@ -122,6 +122,10 @@
|
|||
"type": "schema",
|
||||
"name": "schema_project_photoshop"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_project_substancepainter"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_project_harmony"
|
||||
|
|
|
|||
|
|
@ -40,7 +40,13 @@
|
|||
"label": "Default Variants",
|
||||
"object_type": "text",
|
||||
"docstring": "Fill default variant(s) (like 'Main' or 'Default') used in subset name creation."
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "mark_for_review",
|
||||
"label": "Review",
|
||||
"default": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
|
@ -51,6 +57,21 @@
|
|||
"key": "publish",
|
||||
"label": "Publish plugins",
|
||||
"children": [
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "CollectReview",
|
||||
"label": "Collect Review",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled",
|
||||
"default": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "substancepainter",
|
||||
"label": "Substance Painter",
|
||||
"is_file": true,
|
||||
"children": [
|
||||
{
|
||||
"key": "imageio",
|
||||
"type": "dict",
|
||||
"label": "Color Management (ImageIO)",
|
||||
"is_group": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_imageio_config"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_imageio_file_rules"
|
||||
}
|
||||
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict-modifiable",
|
||||
"key": "shelves",
|
||||
"label": "Shelves",
|
||||
"use_label_wrap": true,
|
||||
"object_type": {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -111,6 +111,14 @@
|
|||
{
|
||||
"type": "label",
|
||||
"label": "Here's a link to the doc where you can find explanations about customing the naming of referenced assets: https://openpype.io/docs/admin_hosts_maya#load-plugins"
|
||||
},
|
||||
{
|
||||
"type": "separator"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "display_handle",
|
||||
"label": "Display Handle On Load References"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,40 @@
|
|||
{
|
||||
"type": "dict",
|
||||
"key": "substancepainter",
|
||||
"label": "Substance Painter",
|
||||
"collapsible": true,
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "schema_template",
|
||||
"name": "template_host_unchangables"
|
||||
},
|
||||
{
|
||||
"key": "environment",
|
||||
"label": "Environment",
|
||||
"type": "raw-json"
|
||||
},
|
||||
{
|
||||
"type": "dict-modifiable",
|
||||
"key": "variants",
|
||||
"collapsible_key": true,
|
||||
"use_label_wrap": false,
|
||||
"object_type": {
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "schema_template",
|
||||
"name": "template_host_variant_items",
|
||||
"skip_paths": ["use_python_2"]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -93,6 +93,10 @@
|
|||
"type": "schema",
|
||||
"name": "schema_celaction"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_substancepainter"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_unreal"
|
||||
|
|
|
|||
|
|
@ -132,6 +132,7 @@ QPushButton {
|
|||
border-radius: 0.2em;
|
||||
padding: 3px 5px 3px 5px;
|
||||
background: {color:bg-buttons};
|
||||
min-width: 0px; /* Substance Painter fix */
|
||||
}
|
||||
|
||||
QPushButton:hover {
|
||||
|
|
@ -337,7 +338,15 @@ QTabWidget::tab-bar {
|
|||
alignment: left;
|
||||
}
|
||||
|
||||
/* avoid QTabBar overrides in Substance Painter */
|
||||
QTabBar {
|
||||
text-transform: none;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
QTabBar::tab {
|
||||
text-transform: none;
|
||||
font-weight: normal;
|
||||
border-top: 1px solid {color:border};
|
||||
border-left: 1px solid {color:border};
|
||||
border-right: 1px solid {color:border};
|
||||
|
|
@ -377,6 +386,7 @@ QHeaderView {
|
|||
QHeaderView::section {
|
||||
background: {color:bg-view-header};
|
||||
padding: 4px;
|
||||
border-top: 0px; /* Substance Painter fix */
|
||||
border-right: 1px solid {color:bg-view};
|
||||
border-radius: 0px;
|
||||
text-align: center;
|
||||
|
|
|
|||
|
|
@ -9,6 +9,9 @@ log = logging.getLogger("test_publish_in_aftereffects")
|
|||
class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestClass): # noqa
|
||||
"""est case for DL publishing in AfterEffects with multiple compositions.
|
||||
|
||||
Workfile contains 2 prepared `render` instances. First has review set,
|
||||
second doesn't.
|
||||
|
||||
Uses generic TestCase to prepare fixtures for test data, testing DBs,
|
||||
env vars.
|
||||
|
||||
|
|
@ -68,7 +71,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestCla
|
|||
name="renderTest_taskMain2"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 7))
|
||||
DBAssert.count_of_types(dbcon, "representation", 5))
|
||||
|
||||
additional_args = {"context.subset": "workfileTest_task",
|
||||
"context.ext": "aep"}
|
||||
|
|
@ -105,13 +108,13 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestCla
|
|||
additional_args = {"context.subset": "renderTest_taskMain2",
|
||||
"name": "thumbnail"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
DBAssert.count_of_types(dbcon, "representation", 0,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "renderTest_taskMain2",
|
||||
"name": "png_exr"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
DBAssert.count_of_types(dbcon, "representation", 0,
|
||||
additional_args=additional_args))
|
||||
|
||||
assert not any(failures)
|
||||
|
|
|
|||
107
website/docs/artist_hosts_substancepainter.md
Normal file
107
website/docs/artist_hosts_substancepainter.md
Normal file
|
|
@ -0,0 +1,107 @@
|
|||
---
|
||||
id: artist_hosts_substancepainter
|
||||
title: Substance Painter
|
||||
sidebar_label: Substance Painter
|
||||
---
|
||||
|
||||
## OpenPype global tools
|
||||
|
||||
- [Work Files](artist_tools.md#workfiles)
|
||||
- [Load](artist_tools.md#loader)
|
||||
- [Manage (Inventory)](artist_tools.md#inventory)
|
||||
- [Publish](artist_tools.md#publisher)
|
||||
- [Library Loader](artist_tools.md#library-loader)
|
||||
|
||||
## Working with OpenPype in Substance Painter
|
||||
|
||||
The Substance Painter OpenPype integration allows you to:
|
||||
|
||||
- Set the project mesh and easily keep it in sync with updates of the model
|
||||
- Easily export your textures as versioned publishes for others to load and update.
|
||||
|
||||
## Setting the project mesh
|
||||
|
||||
Substance Painter requires a project file to have a mesh path configured.
|
||||
As such, you can't start a workfile without choosing a mesh path.
|
||||
|
||||
To start a new project using a published model you can _without an open project_
|
||||
use OpenPype > Load.. > Load Mesh on a supported publish. This will prompt you
|
||||
with a New Project prompt preset to that particular mesh file.
|
||||
|
||||
If you already have a project open, you can also replace (reload) your mesh
|
||||
using the same Load Mesh functionality.
|
||||
|
||||
After having the project mesh loaded or reloaded through the loader
|
||||
tool the mesh will be _managed_ by OpenPype. For example, you'll be notified
|
||||
on workfile open whether the mesh in your workfile is outdated. You can also
|
||||
set it to specific version using OpenPype > Manage.. where you can right click
|
||||
on the project mesh to perform _Set Version_
|
||||
|
||||
:::info
|
||||
A Substance Painter project will always have only one mesh set. Whenever you
|
||||
trigger _Load Mesh_ from the loader this will **replace** your currently loaded
|
||||
mesh for your open project.
|
||||
:::
|
||||
|
||||
## Publishing textures
|
||||
|
||||
To publish your textures we must first create a `textureSet`
|
||||
publish instance.
|
||||
|
||||
To create a **TextureSet instance** we will use OpenPype's publisher tool. Go
|
||||
to **OpenPype → Publish... → TextureSet**
|
||||
|
||||
The texture set instance will define what Substance Painter export template (`.spexp`) to
|
||||
use and thus defines what texture maps will be exported from your workfile. This
|
||||
can be set with the **Output Template** attribute on the instance.
|
||||
|
||||
:::info
|
||||
The TextureSet instance gets saved with your Substance Painter project. As such,
|
||||
you will only need to configure this once for your workfile. Next time you can
|
||||
just click **OpenPype → Publish...** and start publishing directly with the
|
||||
same settings.
|
||||
:::
|
||||
|
||||
#### Publish per output map of the Substance Painter preset
|
||||
|
||||
The Texture Set instance generates a publish per output map that is defined in
|
||||
the Substance Painter's export preset. For example a publish from a default
|
||||
PBR Metallic Roughness texture set results in six separate published subsets
|
||||
(if all the channels exist in your file).
|
||||
|
||||

|
||||
|
||||
When publishing for example a texture set with variant **Main** six instances will
|
||||
be published with the variants:
|
||||
- Main.**BaseColor**
|
||||
- Main.**Emissive**
|
||||
- Main.**Height**
|
||||
- Main.**Metallic**
|
||||
- Main.**Normal**
|
||||
- Main.**Roughness**
|
||||
|
||||
The bold output map name for the publish is based on the string that is pulled
|
||||
from what is considered to be the static part of the filename templates in
|
||||
the export preset. The tokens like `$mesh` and `(_$colorSpace)` are ignored.
|
||||
So `$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)` becomes `BaseColor`.
|
||||
|
||||
An example output for PBR Metallic Roughness would be:
|
||||
|
||||

|
||||
|
||||
## Known issues
|
||||
|
||||
#### Can't see the OpenPype menu?
|
||||
|
||||
If you're unable to see the OpenPype top level menu in Substance Painter make
|
||||
sure you have launched Substance Painter through OpenPype and that the OpenPype
|
||||
Integration plug-in is loaded inside Substance Painter: **Python > openpype_plugin**
|
||||
|
||||
#### Substance Painter + Steam
|
||||
|
||||
Running the steam version of Substance Painter within OpenPype will require you
|
||||
to close the Steam executable before launching Substance Painter through OpenPype.
|
||||
Otherwise the Substance Painter process is launched using Steam's existing
|
||||
environment and thus will not be able to pick up the pipeline integration.
|
||||
|
||||
This appears to be a limitation of how Steam works.
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 45 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 7.3 KiB |
Loading…
Add table
Add a link
Reference in a new issue