diff --git a/CHANGELOG.md b/CHANGELOG.md
index 707b61676f..3cca692b68 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,88 @@
# Changelog
-## [3.14.6](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8)
-[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...HEAD)
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8)
+
+**🚀 Enhancements**
+
+- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139)
+- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137)
+- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129)
+- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126)
+- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115)
+- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046)
+- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148)
+
+**🐛 Bug fixes**
+
+- Ftrack: Fix occasional double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153)
+- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136)
+- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135)
+- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117)
+
+
+## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7)
+
+**🆕 New features**
+
+- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055)
+
+**🚀 Enhancements**
+
+- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121)
+- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120)
+- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116)
+- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112)
+- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101)
+- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097)
+- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090)
+- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079)
+- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064)
+- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063)
+- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058)
+- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052)
+- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048)
+- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042)
+
+**🐛 Bug fixes**
+
+- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119)
+- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118)
+- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114)
+- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113)
+- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096)
+- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095)
+- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086)
+- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085)
+- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083)
+- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080)
+- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077)
+- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074)
+- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070)
+- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067)
+- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066)
+- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053)
+- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050)
+
+**🔀 Refactored code**
+
+- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089)
+- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065)
+
+**Merged pull requests:**
+
+- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100)
+- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093)
+- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081)
+- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059)
+- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047)
+
+## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6)
### 📖 Documentation
diff --git a/HISTORY.md b/HISTORY.md
index f6cc74e114..f4e132488b 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,5 +1,119 @@
# Changelog
+## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8)
+
+**🚀 Enhancements**
+
+- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139)
+- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137)
+- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129)
+- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126)
+- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115)
+- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046)
+- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148)
+
+**🐛 Bug fixes**
+
+- Ftrack: Fix occasional double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153)
+- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136)
+- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135)
+- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117)
+
+## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7)
+
+**🆕 New features**
+
+- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055)
+
+**🚀 Enhancements**
+
+- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121)
+- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120)
+- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116)
+- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112)
+- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101)
+- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097)
+- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090)
+- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079)
+- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064)
+- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063)
+- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058)
+- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052)
+- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048)
+- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042)
+
+**🐛 Bug fixes**
+
+- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119)
+- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118)
+- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114)
+- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113)
+- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096)
+- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095)
+- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086)
+- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085)
+- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083)
+- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080)
+- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077)
+- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074)
+- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070)
+- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067)
+- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066)
+- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053)
+- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050)
+
+**🔀 Refactored code**
+
+- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089)
+- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065)
+
+**Merged pull requests:**
+
+- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100)
+- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093)
+- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081)
+- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059)
+- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047)
+
+## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6)
+
+### 📖 Documentation
+
+- Documentation: Minor updates to dev\_requirements.md [\#4025](https://github.com/pypeclub/OpenPype/pull/4025)
+
+**🆕 New features**
+
+- Nuke: add 13.2 variant [\#4041](https://github.com/pypeclub/OpenPype/pull/4041)
+
+**🚀 Enhancements**
+
+- Publish Report Viewer: Store reports locally on machine [\#4040](https://github.com/pypeclub/OpenPype/pull/4040)
+- General: More specific error in burnins script [\#4026](https://github.com/pypeclub/OpenPype/pull/4026)
+- General: Extract review does not crash with old settings overrides [\#4023](https://github.com/pypeclub/OpenPype/pull/4023)
+- Publisher: Convertors for legacy instances [\#4020](https://github.com/pypeclub/OpenPype/pull/4020)
+- workflows: adding milestone creator and assigner [\#4018](https://github.com/pypeclub/OpenPype/pull/4018)
+- Publisher: Catch creator errors [\#4015](https://github.com/pypeclub/OpenPype/pull/4015)
+
+**🐛 Bug fixes**
+
+- Hiero - effect collection fixes [\#4038](https://github.com/pypeclub/OpenPype/pull/4038)
+- Nuke - loader clip correct hash conversion in path [\#4037](https://github.com/pypeclub/OpenPype/pull/4037)
+- Maya: Soft fail when applying capture preset [\#4034](https://github.com/pypeclub/OpenPype/pull/4034)
+- Igniter: handle missing directory [\#4032](https://github.com/pypeclub/OpenPype/pull/4032)
+- StandalonePublisher: Fix thumbnail publishing [\#4029](https://github.com/pypeclub/OpenPype/pull/4029)
+- Experimental Tools: Fix publisher import [\#4027](https://github.com/pypeclub/OpenPype/pull/4027)
+- Houdini: fix wrong path in ASS loader [\#4016](https://github.com/pypeclub/OpenPype/pull/4016)
+
+**🔀 Refactored code**
+
+- General: Import lib functions from lib [\#4017](https://github.com/pypeclub/OpenPype/pull/4017)
+
## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5)
diff --git a/openpype/action.py b/openpype/action.py
index de9cdee010..15c96404b6 100644
--- a/openpype/action.py
+++ b/openpype/action.py
@@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context):
return get_errored_plugins_from_context(context)
-# 'RepairAction' and 'RepairContextAction' were moved to
-# 'openpype.pipeline.publish' please change you imports.
-# There is no "reasonable" way hot mark these classes as deprecated to show
-# warning of wrong import.
-# Deprecated since 3.14.* will be removed in 3.16.*
class RepairAction(pyblish.api.Action):
"""Repairs the action
To process the repairing this requires a static `repair(instance)` method
is available on the plugin.
+ Deprecated:
+        'RepairAction' and 'RepairContextAction' were moved to
+        'openpype.pipeline.publish'. Please change your imports.
+        There is no reasonable way to mark these classes as deprecated
+        and show a warning about the wrong import. Deprecated since
+        3.14.*, will be removed in 3.16.*.
+
"""
label = "Repair"
on = "failed" # This action is only available on a failed plug-in
@@ -103,13 +105,19 @@ class RepairAction(pyblish.api.Action):
plugin.repair(instance)
-# Deprecated since 3.14.* will be removed in 3.16.*
class RepairContextAction(pyblish.api.Action):
"""Repairs the action
To process the repairing this requires a static `repair(instance)` method
is available on the plugin.
+ Deprecated:
+        'RepairAction' and 'RepairContextAction' were moved to
+        'openpype.pipeline.publish'. Please change your imports.
+        There is no reasonable way to mark these classes as deprecated
+        and show a warning about the wrong import. Deprecated since
+        3.14.*, will be removed in 3.16.*.
+
"""
label = "Repair"
on = "failed" # This action is only available on a failed plug-in
diff --git a/openpype/client/entities.py b/openpype/client/entities.py
index 43afccf2f1..c415be8816 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -389,10 +389,11 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
returned if 'None' is passed.
Returns:
- None: If subset with specified filters was not found.
- Dict: Subset document which can be reduced to specified 'fields'.
- """
+        Union[None, Dict[str, Any]]: None if subset with specified filters
+            was not found, otherwise the subset document which can be
+            reduced to specified 'fields'.
+ """
if not subset_name:
return None
diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py
index 44144e5fff..26b43c39cb 100644
--- a/openpype/hooks/pre_copy_last_published_workfile.py
+++ b/openpype/hooks/pre_copy_last_published_workfile.py
@@ -38,7 +38,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
sync_server = self.modules_manager.get("sync_server")
if not sync_server or not sync_server.enabled:
- self.log.deubg("Sync server module is not enabled or available")
+ self.log.debug("Sync server module is not enabled or available")
return
# Check there is no workfile available
diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py
index 3b2df745d1..999aefd254 100644
--- a/openpype/host/interfaces.py
+++ b/openpype/host/interfaces.py
@@ -252,7 +252,7 @@ class IWorkfileHost:
Remove when all usages are replaced.
"""
- self.save_workfile()
+ self.save_workfile(dst_path)
def open_file(self, filepath):
"""Deprecated variant of 'open_workfile'.
diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py
new file mode 100644
index 0000000000..e141ccaa44
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py
@@ -0,0 +1,72 @@
+import os
+
+import bpy
+
+from openpype.pipeline import publish
+from openpype.hosts.blender.api import plugin
+
+
+class ExtractAnimationABC(publish.Extractor):
+ """Extract as ABC."""
+
+ label = "Extract Animation ABC"
+ hosts = ["blender"]
+ families = ["animation"]
+ optional = True
+
+ def process(self, instance):
+ # Define extract output file path
+ stagingdir = self.staging_dir(instance)
+ filename = f"{instance.name}.abc"
+ filepath = os.path.join(stagingdir, filename)
+
+ context = bpy.context
+
+ # Perform extraction
+ self.log.info("Performing extraction..")
+
+ plugin.deselect_all()
+
+ selected = []
+ asset_group = None
+
+ objects = []
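+        # gather objects from collections first, then walk their children;
+        # appending while iterating extends the traversal to all descendants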
+ for obj in instance:
+ if isinstance(obj, bpy.types.Collection):
+ for child in obj.all_objects:
+ objects.append(child)
+ for obj in objects:
+ children = [o for o in bpy.data.objects if o.parent == obj]
+ for child in children:
+ objects.append(child)
+
+ for obj in objects:
+ obj.select_set(True)
+ selected.append(obj)
+
+ context = plugin.create_blender_context(
+ active=asset_group, selected=selected)
+
+ # We export the abc
+ bpy.ops.wm.alembic_export(
+ context,
+ filepath=filepath,
+ selected=True,
+ flatten=False
+ )
+
+ plugin.deselect_all()
+
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ representation = {
+ 'name': 'abc',
+ 'ext': 'abc',
+ 'files': filename,
+ "stagingDir": stagingdir,
+ }
+ instance.data["representations"].append(representation)
+
+ self.log.info("Extracted instance '%s' to: %s",
+ instance.name, representation)
diff --git a/openpype/hosts/celaction/__init__.py b/openpype/hosts/celaction/__init__.py
index e69de29bb2..8983d48d7d 100644
--- a/openpype/hosts/celaction/__init__.py
+++ b/openpype/hosts/celaction/__init__.py
@@ -0,0 +1,10 @@
+from .addon import (
+ CELACTION_ROOT_DIR,
+ CelactionAddon,
+)
+
+
+__all__ = (
+ "CELACTION_ROOT_DIR",
+ "CelactionAddon",
+)
diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py
new file mode 100644
index 0000000000..9158010011
--- /dev/null
+++ b/openpype/hosts/celaction/addon.py
@@ -0,0 +1,31 @@
+import os
+from openpype.modules import OpenPypeModule, IHostAddon
+
+CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class CelactionAddon(OpenPypeModule, IHostAddon):
+ name = "celaction"
+ host_name = "celaction"
+
+ def initialize(self, module_settings):
+ self.enabled = True
+
+ def get_launch_hook_paths(self, app):
+ if app.host_name != self.host_name:
+ return []
+ return [
+ os.path.join(CELACTION_ROOT_DIR, "hooks")
+ ]
+
+ def add_implementation_envs(self, env, _app):
+        # Set default values if they are not already set via settings
+ defaults = {
+ "LOGLEVEL": "DEBUG"
+ }
+ for key, value in defaults.items():
+ if not env.get(key):
+ env[key] = value
+
+ def get_workfile_extensions(self):
+ return [".scn"]
diff --git a/openpype/hosts/celaction/api/__init__.py b/openpype/hosts/celaction/api/__init__.py
deleted file mode 100644
index 8c93d93738..0000000000
--- a/openpype/hosts/celaction/api/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-kwargs = None
diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py
deleted file mode 100644
index 88fc11cafb..0000000000
--- a/openpype/hosts/celaction/api/cli.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import os
-import sys
-import copy
-import argparse
-
-import pyblish.api
-import pyblish.util
-
-import openpype.hosts.celaction
-from openpype.lib import Logger
-from openpype.hosts.celaction import api as celaction
-from openpype.tools.utils import host_tools
-from openpype.pipeline import install_openpype_plugins
-
-
-log = Logger.get_logger("Celaction_cli_publisher")
-
-publish_host = "celaction"
-
-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
-
-
-def cli():
- parser = argparse.ArgumentParser(prog="celaction_publish")
-
- parser.add_argument("--currentFile",
- help="Pass file to Context as `currentFile`")
-
- parser.add_argument("--chunk",
- help=("Render chanks on farm"))
-
- parser.add_argument("--frameStart",
- help=("Start of frame range"))
-
- parser.add_argument("--frameEnd",
- help=("End of frame range"))
-
- parser.add_argument("--resolutionWidth",
- help=("Width of resolution"))
-
- parser.add_argument("--resolutionHeight",
- help=("Height of resolution"))
-
- celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__
-
-
-def _prepare_publish_environments():
- """Prepares environments based on request data."""
- env = copy.deepcopy(os.environ)
-
- project_name = os.getenv("AVALON_PROJECT")
- asset_name = os.getenv("AVALON_ASSET")
-
- env["AVALON_PROJECT"] = project_name
- env["AVALON_ASSET"] = asset_name
- env["AVALON_TASK"] = os.getenv("AVALON_TASK")
- env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR")
- env["AVALON_APP"] = f"hosts.{publish_host}"
- env["AVALON_APP_NAME"] = "celaction/local"
-
- env["PYBLISH_HOSTS"] = publish_host
-
- os.environ.update(env)
-
-
-def main():
- # prepare all environments
- _prepare_publish_environments()
-
- # Registers pype's Global pyblish plugins
- install_openpype_plugins()
-
- if os.path.exists(PUBLISH_PATH):
- log.info(f"Registering path: {PUBLISH_PATH}")
- pyblish.api.register_plugin_path(PUBLISH_PATH)
-
- pyblish.api.register_host(publish_host)
-
- return host_tools.show_publish()
-
-
-if __name__ == "__main__":
- cli()
- result = main()
- sys.exit(not bool(result))
diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_registers.py
deleted file mode 100644
index e49e66f163..0000000000
--- a/openpype/hosts/celaction/hooks/pre_celaction_registers.py
+++ /dev/null
@@ -1,122 +0,0 @@
-import os
-import shutil
-import winreg
-from openpype.lib import PreLaunchHook
-from openpype.hosts.celaction import api as celaction
-
-
-class CelactionPrelaunchHook(PreLaunchHook):
- """
- Bootstrap celacion with pype
- """
- workfile_ext = "scn"
- app_groups = ["celaction"]
- platforms = ["windows"]
-
- def execute(self):
- # Add workfile path to launch arguments
- workfile_path = self.workfile_path()
- if workfile_path:
- self.launch_context.launch_args.append(workfile_path)
-
- project_name = self.data["project_name"]
- asset_name = self.data["asset_name"]
- task_name = self.data["task_name"]
-
- # get publish version of celaction
- app = "celaction_publish"
-
- # setting output parameters
- path = r"Software\CelAction\CelAction2D\User Settings"
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
- hKey = winreg.OpenKey(
- winreg.HKEY_CURRENT_USER,
- "Software\\CelAction\\CelAction2D\\User Settings", 0,
- winreg.KEY_ALL_ACCESS)
-
- # TODO: this will need to be checked more thoroughly
- pype_exe = os.getenv("OPENPYPE_EXECUTABLE")
-
- winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe)
-
- parameters = [
- "launch",
- f"--app {app}",
- f"--project {project_name}",
- f"--asset {asset_name}",
- f"--task {task_name}",
- "--currentFile \\\"\"*SCENE*\"\\\"",
- "--chunk 10",
- "--frameStart *START*",
- "--frameEnd *END*",
- "--resolutionWidth *X*",
- "--resolutionHeight *Y*",
- # "--programDir \"'*PROGPATH*'\""
- ]
- winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
- " ".join(parameters))
-
- # setting resolution parameters
- path = r"Software\CelAction\CelAction2D\User Settings\Dialogs"
- path += r"\SubmitOutput"
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
- hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
- winreg.KEY_ALL_ACCESS)
- winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
- winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920)
- winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080)
-
- # making sure message dialogs don't appear when overwriting
- path = r"Software\CelAction\CelAction2D\User Settings\Messages"
- path += r"\OverwriteScene"
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
- hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
- winreg.KEY_ALL_ACCESS)
- winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
- winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
-
- path = r"Software\CelAction\CelAction2D\User Settings\Messages"
- path += r"\SceneSaved"
- winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
- hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
- winreg.KEY_ALL_ACCESS)
- winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
- winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
-
- def workfile_path(self):
- workfile_path = self.data["last_workfile_path"]
-
- # copy workfile from template if doesnt exist any on path
- if not os.path.exists(workfile_path):
- # TODO add ability to set different template workfile path via
- # settings
- pype_celaction_dir = os.path.dirname(os.path.dirname(
- os.path.abspath(celaction.__file__)
- ))
- template_path = os.path.join(
- pype_celaction_dir,
- "resources",
- "celaction_template_scene.scn"
- )
-
- if not os.path.exists(template_path):
- self.log.warning(
- "Couldn't find workfile template file in {}".format(
- template_path
- )
- )
- return
-
- self.log.info(
- f"Creating workfile from template: \"{template_path}\""
- )
-
- # Copy template workfile to new destinantion
- shutil.copy2(
- os.path.normpath(template_path),
- os.path.normpath(workfile_path)
- )
-
- self.log.info(f"Workfile to open: \"{workfile_path}\"")
-
- return workfile_path
diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
new file mode 100644
index 0000000000..62cebf99ed
--- /dev/null
+++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
@@ -0,0 +1,137 @@
+import os
+import shutil
+import winreg
+import subprocess
+from openpype.lib import PreLaunchHook, get_openpype_execute_args
+from openpype.hosts.celaction import scripts
+
+CELACTION_SCRIPTS_DIR = os.path.dirname(
+ os.path.abspath(scripts.__file__)
+)
+
+
+class CelactionPrelaunchHook(PreLaunchHook):
+ """
+    Bootstrap CelAction with OpenPype
+ """
+ app_groups = ["celaction"]
+ platforms = ["windows"]
+
+ def execute(self):
+ asset_doc = self.data["asset_doc"]
+ width = asset_doc["data"]["resolutionWidth"]
+ height = asset_doc["data"]["resolutionHeight"]
+
+ # Add workfile path to launch arguments
+ workfile_path = self.workfile_path()
+ if workfile_path:
+ self.launch_context.launch_args.append(workfile_path)
+
+ # setting output parameters
+ path_user_settings = "\\".join([
+ "Software", "CelAction", "CelAction2D", "User Settings"
+ ])
+ winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings)
+ hKey = winreg.OpenKey(
+ winreg.HKEY_CURRENT_USER, path_user_settings, 0,
+ winreg.KEY_ALL_ACCESS
+ )
+
+ path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
+ subproces_args = get_openpype_execute_args("run", path_to_cli)
+ openpype_executable = subproces_args.pop(0)
+
+ winreg.SetValueEx(
+ hKey,
+ "SubmitAppTitle",
+ 0,
+ winreg.REG_SZ,
+ openpype_executable
+ )
+
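+        # the *TOKEN* placeholders below are expected to be expanded by
+        # CelAction itself when submitting a render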
+ parameters = subproces_args + [
+ "--currentFile", "*SCENE*",
+ "--chunk", "*CHUNK*",
+ "--frameStart", "*START*",
+ "--frameEnd", "*END*",
+ "--resolutionWidth", "*X*",
+ "--resolutionHeight", "*Y*"
+ ]
+
+ winreg.SetValueEx(
+ hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
+ subprocess.list2cmdline(parameters)
+ )
+
+ # setting resolution parameters
+ path_submit = "\\".join([
+ path_user_settings, "Dialogs", "SubmitOutput"
+ ])
+ winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit)
+ hKey = winreg.OpenKey(
+ winreg.HKEY_CURRENT_USER, path_submit, 0,
+ winreg.KEY_ALL_ACCESS
+ )
+ winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
+ winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width)
+ winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height)
+
+ # making sure message dialogs don't appear when overwriting
+ path_overwrite_scene = "\\".join([
+ path_user_settings, "Messages", "OverwriteScene"
+ ])
+ winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene)
+ hKey = winreg.OpenKey(
+ winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0,
+ winreg.KEY_ALL_ACCESS
+ )
+ winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
+ winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
+
+        # set scene as not saved
+ path_scene_saved = "\\".join([
+ path_user_settings, "Messages", "SceneSaved"
+ ])
+ winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved)
+ hKey = winreg.OpenKey(
+ winreg.HKEY_CURRENT_USER, path_scene_saved, 0,
+ winreg.KEY_ALL_ACCESS
+ )
+ winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
+ winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
+
+ def workfile_path(self):
+ workfile_path = self.data["last_workfile_path"]
+
+        # copy workfile from template if none exists on the path
+ if not os.path.exists(workfile_path):
+ # TODO add ability to set different template workfile path via
+ # settings
+ openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
+ template_path = os.path.join(
+ openpype_celaction_dir,
+ "resources",
+ "celaction_template_scene.scn"
+ )
+
+ if not os.path.exists(template_path):
+ self.log.warning(
+ "Couldn't find workfile template file in {}".format(
+ template_path
+ )
+ )
+ return
+
+ self.log.info(
+ f"Creating workfile from template: \"{template_path}\""
+ )
+
+            # Copy template workfile to new destination
+ shutil.copy2(
+ os.path.normpath(template_path),
+ os.path.normpath(workfile_path)
+ )
+
+ self.log.info(f"Workfile to open: \"{workfile_path}\"")
+
+ return workfile_path
diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
index 15c5ddaf1c..bf97dd744b 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
@@ -1,5 +1,7 @@
import pyblish.api
-from openpype.hosts.celaction import api as celaction
+import argparse
+import sys
+from pprint import pformat
class CollectCelactionCliKwargs(pyblish.api.Collector):
@@ -9,15 +11,31 @@ class CollectCelactionCliKwargs(pyblish.api.Collector):
order = pyblish.api.Collector.order - 0.1
def process(self, context):
- kwargs = celaction.kwargs.copy()
+ parser = argparse.ArgumentParser(prog="celaction")
+ parser.add_argument("--currentFile",
+ help="Pass file to Context as `currentFile`")
+ parser.add_argument("--chunk",
+ help=("Render chanks on farm"))
+ parser.add_argument("--frameStart",
+ help=("Start of frame range"))
+ parser.add_argument("--frameEnd",
+ help=("End of frame range"))
+ parser.add_argument("--resolutionWidth",
+ help=("Width of resolution"))
+ parser.add_argument("--resolutionHeight",
+ help=("Height of resolution"))
+ passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__
- self.log.info("Storing kwargs: %s" % kwargs)
- context.set_data("kwargs", kwargs)
+ self.log.info("Storing kwargs ...")
+ self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs)))
+
+ # set kwargs to context data
+ context.set_data("passingKwargs", passing_kwargs)
# get kwargs onto context data as keys with values
- for k, v in kwargs.items():
+ for k, v in passing_kwargs.items():
self.log.info(f"Setting `{k}` to instance.data with value: `{v}`")
if k in ["frameStart", "frameEnd"]:
- context.data[k] = kwargs[k] = int(v)
+ context.data[k] = passing_kwargs[k] = int(v)
else:
context.data[k] = v
diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
index 1d2d9da1af..35ac7fc264 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
@@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
"version": version
}
- celaction_kwargs = context.data.get("kwargs", {})
+        celaction_kwargs = context.data.get("passingKwargs", {})
if celaction_kwargs:
shared_instance_data.update(celaction_kwargs)
@@ -52,8 +53,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
"subset": subset,
"label": scene_file,
"family": family,
- "families": [family, "ftrack"],
- "representations": list()
+ "families": [],
+ "representations": []
})
# adding basic script data
@@ -72,7 +73,6 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
self.log.info('Publishing Celaction workfile')
# render instance
- family = "render.farm"
subset = f"render{task}Main"
instance = context.create_instance(name=subset)
# getting instance state
@@ -81,8 +81,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
# add assetEntity data into instance
instance.data.update({
"label": "{} - farm".format(subset),
- "family": family,
- "families": [family],
+ "family": "render.farm",
+ "families": [],
"subset": subset
})
diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py
index 9cbb0e4880..f6db6c000d 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py
@@ -11,28 +11,31 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
families = ["render.farm"]
# Presets
- anatomy_render_key = None
- publish_render_metadata = None
+ output_extension = "png"
+ anatomy_template_key_render_files = None
+ anatomy_template_key_metadata = None
def process(self, instance):
anatomy = instance.context.data["anatomy"]
anatomy_data = copy.deepcopy(instance.data["anatomyData"])
- anatomy_data["family"] = "render"
padding = anatomy.templates.get("frame_padding", 4)
anatomy_data.update({
"frame": f"%0{padding}d",
- "representation": "png"
+ "family": "render",
+ "representation": self.output_extension,
+ "ext": self.output_extension
})
anatomy_filled = anatomy.format(anatomy_data)
# get anatomy rendering keys
- anatomy_render_key = self.anatomy_render_key or "render"
- publish_render_metadata = self.publish_render_metadata or "render"
+ r_anatomy_key = self.anatomy_template_key_render_files
+ m_anatomy_key = self.anatomy_template_key_metadata
# get folder and path for rendering images from celaction
- render_dir = anatomy_filled[anatomy_render_key]["folder"]
- render_path = anatomy_filled[anatomy_render_key]["path"]
+ render_dir = anatomy_filled[r_anatomy_key]["folder"]
+ render_path = anatomy_filled[r_anatomy_key]["path"]
+ self.log.debug("__ render_path: `{}`".format(render_path))
# create dir if it doesnt exists
try:
@@ -46,9 +49,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
instance.data["path"] = render_path
# get anatomy for published renders folder path
- if anatomy_filled.get(publish_render_metadata):
+ if anatomy_filled.get(m_anatomy_key):
instance.data["publishRenderMetadataFolder"] = anatomy_filled[
- publish_render_metadata]["folder"]
+ m_anatomy_key]["folder"]
self.log.info("Metadata render path: `{}`".format(
instance.data["publishRenderMetadataFolder"]
))
diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/celaction/scripts/publish_cli.py b/openpype/hosts/celaction/scripts/publish_cli.py
new file mode 100644
index 0000000000..39d3f1a94d
--- /dev/null
+++ b/openpype/hosts/celaction/scripts/publish_cli.py
@@ -0,0 +1,37 @@
+import os
+import sys
+
+import pyblish.api
+import pyblish.util
+
+import openpype.hosts.celaction
+from openpype.lib import Logger
+from openpype.tools.utils import host_tools
+from openpype.pipeline import install_openpype_plugins
+
+
+log = Logger.get_logger("celaction")
+
+PUBLISH_HOST = "celaction"
+HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
+PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+
+
+def main():
+ # Registers pype's Global pyblish plugins
+ install_openpype_plugins()
+
+ if os.path.exists(PUBLISH_PATH):
+ log.info(f"Registering path: {PUBLISH_PATH}")
+ pyblish.api.register_plugin_path(PUBLISH_PATH)
+
+ pyblish.api.register_host(PUBLISH_HOST)
+ pyblish.api.register_target("local")
+
+ return host_tools.show_publish()
+
+
+if __name__ == "__main__":
+ result = main()
+ sys.exit(not bool(result))
diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py
index 092ce9d106..ca113fd98a 100644
--- a/openpype/hosts/flame/api/plugin.py
+++ b/openpype/hosts/flame/api/plugin.py
@@ -4,13 +4,13 @@ import shutil
from copy import deepcopy
from xml.etree import ElementTree as ET
+import qargparse
from Qt import QtCore, QtWidgets
-import qargparse
from openpype import style
-from openpype.settings import get_current_project_settings
from openpype.lib import Logger
from openpype.pipeline import LegacyCreator, LoaderPlugin
+from openpype.settings import get_current_project_settings
from . import constants
from . import lib as flib
@@ -596,18 +596,28 @@ class PublishableClip:
if not hero_track and self.vertical_sync:
# driving layer is set as negative match
for (_in, _out), hero_data in self.vertical_clip_match.items():
- hero_data.update({"heroTrack": False})
- if _in == self.clip_in and _out == self.clip_out:
+ """
+                Only one hero clip instance is expected in
+                `self.vertical_clip_match`, so this loops only until
+                the current non-hero clip is matched with its hero clip.
+
+                `tag_hierarchy_data` is set only once for every clip
+                that is not a hero clip.
+ """
+ _hero_data = deepcopy(hero_data)
+ _hero_data.update({"heroTrack": False})
+ if _in <= self.clip_in and _out >= self.clip_out:
data_subset = hero_data["subset"]
# add track index in case duplicity of names in hero data
if self.subset in data_subset:
- hero_data["subset"] = self.subset + str(
+ _hero_data["subset"] = self.subset + str(
self.track_index)
# in case track name and subset name is the same then add
if self.subset_name == self.track_name:
- hero_data["subset"] = self.subset
+ _hero_data["subset"] = self.subset
# assing data to return hierarchy data to tag
- tag_hierarchy_data = hero_data
+ tag_hierarchy_data = _hero_data
+ break
# add data to return data dict
self.marker_data.update(tag_hierarchy_data)
@@ -690,6 +700,54 @@ class ClipLoader(LoaderPlugin):
)
]
+ _mapping = None
+
+ def get_colorspace(self, context):
+ """Get colorspace name
+
+ Look either to version data or representation data.
+
+ Args:
+ context (dict): version context data
+
+ Returns:
+ str: colorspace name or None
+ """
+ version = context['version']
+ version_data = version.get("data", {})
+ colorspace = version_data.get(
+ "colorspace", None
+ )
+
+ if (
+ not colorspace
+ or colorspace == "Unknown"
+ ):
+ colorspace = context["representation"]["data"].get(
+ "colorspace", None)
+
+ return colorspace
+
+ @classmethod
+ def get_native_colorspace(cls, input_colorspace):
+ """Return native colorspace name.
+
+ Args:
+ input_colorspace (str | None): colorspace name
+
+ Returns:
+ str: native colorspace name defined in mapping or None
+ """
+ if not cls._mapping:
+ settings = get_current_project_settings()["flame"]
+ mapping = settings["imageio"]["profilesMapping"]["inputs"]
+ cls._mapping = {
+ input["ocioName"]: input["flameName"]
+ for input in mapping
+ }
+
+ return cls._mapping.get(input_colorspace)
+
class OpenClipSolver(flib.MediaInfoFile):
create_new_clip = False
diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py
index 0843dde76a..f8cb7b3e11 100644
--- a/openpype/hosts/flame/plugins/load/load_clip.py
+++ b/openpype/hosts/flame/plugins/load/load_clip.py
@@ -36,14 +36,15 @@ class LoadClip(opfapi.ClipLoader):
version = context['version']
version_data = version.get("data", {})
version_name = version.get("name", None)
- colorspace = version_data.get("colorspace", None)
+ colorspace = self.get_colorspace(context)
+
clip_name = StringTemplate(self.clip_name_template).format(
context["representation"]["context"])
- # TODO: settings in imageio
# convert colorspace with ocio to flame mapping
# in imageio flame section
- colorspace = colorspace
+ colorspace = self.get_native_colorspace(colorspace)
+ self.log.info("Loading with colorspace: `{}`".format(colorspace))
# create workfile path
workfile_dir = os.environ["AVALON_WORKDIR"]
diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py
index 17ad8075e4..048ac19431 100644
--- a/openpype/hosts/flame/plugins/load/load_clip_batch.py
+++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py
@@ -35,7 +35,7 @@ class LoadClipBatch(opfapi.ClipLoader):
version = context['version']
version_data = version.get("data", {})
version_name = version.get("name", None)
- colorspace = version_data.get("colorspace", None)
+ colorspace = self.get_colorspace(context)
# in case output is not in context replace key to representation
if not context["representation"]["context"].get("output"):
@@ -47,10 +47,10 @@ class LoadClipBatch(opfapi.ClipLoader):
clip_name = StringTemplate(self.clip_name_template).format(
formating_data)
- # TODO: settings in imageio
# convert colorspace with ocio to flame mapping
# in imageio flame section
- colorspace = colorspace
+ colorspace = self.get_native_colorspace(colorspace)
+ self.log.info("Loading with colorspace: `{}`".format(colorspace))
# create workfile path
workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"]
diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py
index f5bb94dbaa..1cc7a8637e 100644
--- a/openpype/hosts/hiero/addon.py
+++ b/openpype/hosts/hiero/addon.py
@@ -27,7 +27,12 @@ class HieroAddon(OpenPypeModule, IHostAddon):
new_hiero_paths.append(norm_path)
env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths)
+ # Remove auto screen scale factor for Qt
+    # - let Hiero decide its value
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
+    # Remove tkinter library paths if they are set
+ env.pop("TK_LIBRARY", None)
+ env.pop("TCL_LIBRARY", None)
# Add vendor to PYTHONPATH
python_path = env["PYTHONPATH"]
diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py
index 781f846bbe..1fa40c9f74 100644
--- a/openpype/hosts/hiero/api/__init__.py
+++ b/openpype/hosts/hiero/api/__init__.py
@@ -30,9 +30,15 @@ from .lib import (
get_timeline_selection,
get_current_track,
get_track_item_tags,
+ get_track_openpype_tag,
+ set_track_openpype_tag,
+ get_track_openpype_data,
get_track_item_pype_tag,
set_track_item_pype_tag,
get_track_item_pype_data,
+ get_trackitem_openpype_tag,
+ set_trackitem_openpype_tag,
+ get_trackitem_openpype_data,
set_publish_attribute,
get_publish_attribute,
imprint,
@@ -85,9 +91,12 @@ __all__ = [
"get_timeline_selection",
"get_current_track",
"get_track_item_tags",
- "get_track_item_pype_tag",
- "set_track_item_pype_tag",
- "get_track_item_pype_data",
+ "get_track_openpype_tag",
+ "set_track_openpype_tag",
+ "get_track_openpype_data",
+ "get_trackitem_openpype_tag",
+ "set_trackitem_openpype_tag",
+ "get_trackitem_openpype_data",
"set_publish_attribute",
"get_publish_attribute",
"imprint",
@@ -99,6 +108,10 @@ __all__ = [
"apply_colorspace_project",
"apply_colorspace_clips",
"get_sequence_pattern_and_padding",
+    # deprecated
+ "get_track_item_pype_tag",
+ "set_track_item_pype_tag",
+ "get_track_item_pype_data",
# plugins
"CreatorWidget",
diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py
index e5d35945af..7f0cf8149a 100644
--- a/openpype/hosts/hiero/api/lib.py
+++ b/openpype/hosts/hiero/api/lib.py
@@ -7,11 +7,15 @@ import os
import re
import sys
import platform
+import functools
+import warnings
+import json
import ast
+import secrets
import shutil
import hiero
-from Qt import QtWidgets
+from Qt import QtWidgets, QtCore, QtXml
from openpype.client import get_project
from openpype.settings import get_project_settings
@@ -20,15 +24,51 @@ from openpype.pipeline.load import filter_containers
from openpype.lib import Logger
from . import tags
-try:
- from PySide.QtCore import QFile, QTextStream
- from PySide.QtXml import QDomDocument
-except ImportError:
- from PySide2.QtCore import QFile, QTextStream
- from PySide2.QtXml import QDomDocument
-# from opentimelineio import opentime
-# from pprint import pformat
+class DeprecatedWarning(DeprecationWarning):
+ pass
+
+
+def deprecated(new_destination):
+ """Mark functions as deprecated.
+
+ It will result in a warning being emitted when the function is used.
+ """
+
+ func = None
+ if callable(new_destination):
+ func = new_destination
+ new_destination = None
+
+ def _decorator(decorated_func):
+ if new_destination is None:
+ warning_message = (
+ " Please check content of deprecated function to figure out"
+ " possible replacement."
+ )
+ else:
+ warning_message = " Please replace your usage with '{}'.".format(
+ new_destination
+ )
+
+ @functools.wraps(decorated_func)
+ def wrapper(*args, **kwargs):
+ warnings.simplefilter("always", DeprecatedWarning)
+ warnings.warn(
+ (
+ "Call to deprecated function '{}'"
+ "\nFunction was moved or removed.{}"
+ ).format(decorated_func.__name__, warning_message),
+ category=DeprecatedWarning,
+ stacklevel=4
+ )
+ return decorated_func(*args, **kwargs)
+ return wrapper
+
+ if func is None:
+ return _decorator
+ return _decorator(func)
+
log = Logger.get_logger(__name__)
@@ -301,7 +341,124 @@ def get_track_item_tags(track_item):
return returning_tag_data
+def _get_tag_unique_hash():
+ # sourcery skip: avoid-builtin-shadow
+ return secrets.token_hex(nbytes=4)
+
+
+def set_track_openpype_tag(track, data=None):
+ """
+ Set openpype track tag to input track object.
+
+ Attributes:
+        track (hiero.core.VideoTrack): hiero object
+        data (dict)[optional]: data to be stored in the tag
+
+ Returns:
+ hiero.core.Tag
+ """
+ data = data or {}
+
+ # basic Tag's attribute
+ tag_data = {
+ "editable": "0",
+ "note": "OpenPype data container",
+ "icon": "openpype_icon.png",
+ "metadata": dict(data.items())
+ }
+ # get available pype tag if any
+ _tag = get_track_openpype_tag(track)
+
+ if _tag:
+ # it not tag then create one
+ tag = tags.update_tag(_tag, tag_data)
+ else:
+ # if pype tag available then update with input data
+ tag = tags.create_tag(
+ "{}_{}".format(
+ self.pype_tag_name,
+ _get_tag_unique_hash()
+ ),
+ tag_data
+ )
+ # add it to the input track item
+ track.addTag(tag)
+
+ return tag
+
+
+def get_track_openpype_tag(track):
+ """
+    Get openpype track tag created by creator or loader plugin.
+
+    Attributes:
+        track (hiero.core.VideoTrack): hiero object
+
+    Returns:
+        hiero.core.Tag: openpype data tag
+    """
+    # get all tags from the track
+ _tags = track.tags()
+ if not _tags:
+ return None
+ for tag in _tags:
+ # return only correct tag defined by global name
+ if self.pype_tag_name in tag.name():
+ return tag
+
+
+def get_track_openpype_data(track, container_name=None):
+ """
+ Get track's openpype tag data.
+
+ Attributes:
+        track (hiero.core.VideoTrack): hiero object
+        container_name (str)[optional]: return only data stored under
+            this container name
+
+    Returns:
+        dict: data found on the openpype tag
+    """
+    return_data = {}
+    # get openpype data tag from the track
+ tag = get_track_openpype_tag(track)
+
+ if not tag:
+ return None
+
+ # get tag metadata attribute
+ tag_data = deepcopy(dict(tag.metadata()))
+
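+    # each remaining metadata value is json serialized data stored
+    # under a 'tag.<object name>' key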
+ for obj_name, obj_data in tag_data.items():
+ obj_name = obj_name.replace("tag.", "")
+
+ if obj_name in ["applieswhole", "note", "label"]:
+ continue
+ return_data[obj_name] = json.loads(obj_data)
+
+ return (
+ return_data[container_name]
+ if container_name
+ else return_data
+ )
+
+
+@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag")
def get_track_item_pype_tag(track_item):
+ # backward compatibility alias
+ return get_trackitem_openpype_tag(track_item)
+
+
+@deprecated("openpype.hosts.hiero.api.lib.set_trackitem_openpype_tag")
+def set_track_item_pype_tag(track_item, data=None):
+ # backward compatibility alias
+ return set_trackitem_openpype_tag(track_item, data)
+
+
+@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_data")
+def get_track_item_pype_data(track_item):
+ # backward compatibility alias
+ return get_trackitem_openpype_data(track_item)
+
+
+def get_trackitem_openpype_tag(track_item):
"""
Get pype track item tag created by creator or loader plugin.
@@ -317,16 +474,16 @@ def get_track_item_pype_tag(track_item):
return None
for tag in _tags:
# return only correct tag defined by global name
- if tag.name() == self.pype_tag_name:
+ if self.pype_tag_name in tag.name():
return tag
-def set_track_item_pype_tag(track_item, data=None):
+def set_trackitem_openpype_tag(track_item, data=None):
"""
- Set pype track item tag to input track_item.
+    Set openpype tag to input track item.
Attributes:
- trackItem (hiero.core.TrackItem): hiero object
+        track_item (hiero.core.TrackItem): hiero object
Returns:
hiero.core.Tag
@@ -341,21 +498,26 @@ def set_track_item_pype_tag(track_item, data=None):
"metadata": dict(data.items())
}
# get available pype tag if any
- _tag = get_track_item_pype_tag(track_item)
-
+ _tag = get_trackitem_openpype_tag(track_item)
if _tag:
# it not tag then create one
tag = tags.update_tag(_tag, tag_data)
else:
# if pype tag available then update with input data
- tag = tags.create_tag(self.pype_tag_name, tag_data)
+ tag = tags.create_tag(
+ "{}_{}".format(
+ self.pype_tag_name,
+ _get_tag_unique_hash()
+ ),
+ tag_data
+ )
# add it to the input track item
track_item.addTag(tag)
return tag
-def get_track_item_pype_data(track_item):
+def get_trackitem_openpype_data(track_item):
"""
Get track item's pype tag data.
@@ -367,7 +529,7 @@ def get_track_item_pype_data(track_item):
"""
data = {}
# get pype data tag from track item
- tag = get_track_item_pype_tag(track_item)
+ tag = get_trackitem_openpype_tag(track_item)
if not tag:
return None
@@ -420,7 +582,7 @@ def imprint(track_item, data=None):
"""
data = data or {}
- tag = set_track_item_pype_tag(track_item, data)
+ tag = set_trackitem_openpype_tag(track_item, data)
# add publish attribute
set_publish_attribute(tag, True)
@@ -832,22 +994,22 @@ def set_selected_track_items(track_items_list, sequence=None):
def _read_doc_from_path(path):
- # reading QDomDocument from HROX path
- hrox_file = QFile(path)
- if not hrox_file.open(QFile.ReadOnly):
+ # reading QtXml.QDomDocument from HROX path
+ hrox_file = QtCore.QFile(path)
+ if not hrox_file.open(QtCore.QFile.ReadOnly):
raise RuntimeError("Failed to open file for reading")
- doc = QDomDocument()
+ doc = QtXml.QDomDocument()
doc.setContent(hrox_file)
hrox_file.close()
return doc
def _write_doc_to_path(doc, path):
- # write QDomDocument to path as HROX
- hrox_file = QFile(path)
- if not hrox_file.open(QFile.WriteOnly):
+ # write QtXml.QDomDocument to path as HROX
+ hrox_file = QtCore.QFile(path)
+ if not hrox_file.open(QtCore.QFile.WriteOnly):
raise RuntimeError("Failed to open file for writing")
- stream = QTextStream(hrox_file)
+ stream = QtCore.QTextStream(hrox_file)
doc.save(stream, 1)
hrox_file.close()
@@ -1030,7 +1192,7 @@ def sync_clip_name_to_data_asset(track_items_list):
# get name and data
ti_name = track_item.name()
- data = get_track_item_pype_data(track_item)
+ data = get_trackitem_openpype_data(track_item)
# ignore if no data on the clip or not publish instance
if not data:
@@ -1042,10 +1204,10 @@ def sync_clip_name_to_data_asset(track_items_list):
if data["asset"] != ti_name:
data["asset"] = ti_name
# remove the original tag
- tag = get_track_item_pype_tag(track_item)
+ tag = get_trackitem_openpype_tag(track_item)
track_item.removeTag(tag)
# create new tag with updated data
- set_track_item_pype_tag(track_item, data)
+ set_trackitem_openpype_tag(track_item, data)
print("asset was changed in clip: {}".format(ti_name))
@@ -1083,10 +1245,10 @@ def check_inventory_versions(track_items=None):
project_name = legacy_io.active_project()
filter_result = filter_containers(containers, project_name)
for container in filter_result.latest:
- set_track_color(container["_track_item"], clip_color)
+ set_track_color(container["_item"], clip_color)
for container in filter_result.outdated:
- set_track_color(container["_track_item"], clip_color_last)
+ set_track_color(container["_item"], clip_color_last)
def selection_changed_timeline(event):
diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py
index ea61dc4785..4ab73e7d19 100644
--- a/openpype/hosts/hiero/api/pipeline.py
+++ b/openpype/hosts/hiero/api/pipeline.py
@@ -1,6 +1,7 @@
"""
Basic avalon integration
"""
+from copy import deepcopy
import os
import contextlib
from collections import OrderedDict
@@ -17,6 +18,7 @@ from openpype.pipeline import (
)
from openpype.tools.utils import host_tools
from . import lib, menu, events
+import hiero
log = Logger.get_logger(__name__)
@@ -106,7 +108,7 @@ def containerise(track_item,
data_imprint.update({k: v})
log.debug("_ data_imprint: {}".format(data_imprint))
- lib.set_track_item_pype_tag(track_item, data_imprint)
+ lib.set_trackitem_openpype_tag(track_item, data_imprint)
return track_item
@@ -123,79 +125,131 @@ def ls():
"""
# get all track items from current timeline
- all_track_items = lib.get_track_items()
+ all_items = lib.get_track_items()
- for track_item in all_track_items:
- container = parse_container(track_item)
- if container:
- yield container
+ # append all video tracks
+ for track in lib.get_current_sequence():
+ if type(track) != hiero.core.VideoTrack:
+ continue
+ all_items.append(track)
+
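+    # a video track tag may hold multiple containers, so parse_container
+    # can return a list here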
+ for item in all_items:
+ container_data = parse_container(item)
+
+ if isinstance(container_data, list):
+ for _c in container_data:
+ yield _c
+ elif container_data:
+ yield container_data
-def parse_container(track_item, validate=True):
+def parse_container(item, validate=True):
"""Return container data from track_item's pype tag.
Args:
- track_item (hiero.core.TrackItem): A containerised track item.
+ item (hiero.core.TrackItem or hiero.core.VideoTrack):
+ A containerised track item.
validate (bool)[optional]: validating with avalon scheme
Returns:
dict: The container schema data for input containerized track item.
"""
+ def data_to_container(item, data):
+ if (
+ not data
+ or data.get("id") != "pyblish.avalon.container"
+ ):
+ return
+
+ if validate and data and data.get("schema"):
+ schema.validate(data)
+
+ if not isinstance(data, dict):
+ return
+
+ # If not all required data return the empty container
+ required = ['schema', 'id', 'name',
+ 'namespace', 'loader', 'representation']
+
+ if any(key not in data for key in required):
+ return
+
+ container = {key: data[key] for key in required}
+
+ container["objectName"] = item.name()
+
+ # Store reference to the node object
+ container["_item"] = item
+
+ return container
+
# convert tag metadata to normal keys names
- data = lib.get_track_item_pype_data(track_item)
- if (
- not data
- or data.get("id") != "pyblish.avalon.container"
- ):
- return
+ if type(item) == hiero.core.VideoTrack:
+ return_list = []
+ _data = lib.get_track_openpype_data(item)
- if validate and data and data.get("schema"):
- schema.validate(data)
+ if not _data:
+ return
+        # convert the data to a list of containers and validate them
+ for _, obj_data in _data.items():
+            container = data_to_container(item, obj_data)
+            return_list.append(container)
+ return return_list
+ else:
+ _data = lib.get_trackitem_openpype_data(item)
+ return data_to_container(item, _data)
- if not isinstance(data, dict):
- return
-
- # If not all required data return the empty container
- required = ['schema', 'id', 'name',
- 'namespace', 'loader', 'representation']
-
- if not all(key in data for key in required):
- return
-
- container = {key: data[key] for key in required}
-
- container["objectName"] = track_item.name()
-
- # Store reference to the node object
- container["_track_item"] = track_item
+def _update_container_data(container, data):
+ for key in container:
+ try:
+ container[key] = data[key]
+ except KeyError:
+ pass
return container
-def update_container(track_item, data=None):
- """Update container data to input track_item's pype tag.
+def update_container(item, data=None):
+ """Update container data to input track_item or track's
+ openpype tag.
Args:
- track_item (hiero.core.TrackItem): A containerised track item.
+ item (hiero.core.TrackItem or hiero.core.VideoTrack):
+ A containerised track item.
data (dict)[optional]: dictionary with data to be updated
Returns:
bool: True if container was updated correctly
"""
- data = data or dict()
- container = lib.get_track_item_pype_data(track_item)
+ data = data or {}
+ data = deepcopy(data)
- for _key, _value in container.items():
- try:
- container[_key] = data[_key]
- except KeyError:
- pass
+ if type(item) == hiero.core.VideoTrack:
+ # form object data for test
+ object_name = data["objectName"]
- log.info("Updating container: `{}`".format(track_item.name()))
- return bool(lib.set_track_item_pype_tag(track_item, container))
+ # get all available containers
+ containers = lib.get_track_openpype_data(item)
+ container = lib.get_track_openpype_data(item, object_name)
+
+ containers = deepcopy(containers)
+ container = deepcopy(container)
+
+ # update data in container
+ updated_container = _update_container_data(container, data)
+ # merge updated container back to containers
+ containers.update({object_name: updated_container})
+
+ return bool(lib.set_track_openpype_tag(item, containers))
+ else:
+ container = lib.get_trackitem_openpype_data(item)
+ updated_container = _update_container_data(container, data)
+
+ log.info("Updating container: `{}`".format(item.name()))
+ return bool(lib.set_trackitem_openpype_tag(item, updated_container))
def launch_workfiles_app(*args):
@@ -272,11 +326,11 @@ def on_pyblish_instance_toggled(instance, old_value, new_value):
instance, old_value, new_value))
from openpype.hosts.hiero.api import (
- get_track_item_pype_tag,
+ get_trackitem_openpype_tag,
set_publish_attribute
)
# Whether instances should be passthrough based on new value
track_item = instance.data["item"]
- tag = get_track_item_pype_tag(track_item)
+ tag = get_trackitem_openpype_tag(track_item)
set_publish_attribute(tag, new_value)
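
For clarity, a minimal standalone sketch of the merge semantics introduced by `_update_container_data()` above: only keys that already exist on the container are overwritten, extra keys in the incoming data are ignored. The dictionary values are made up for illustration.

```python
def _update_container_data(container, data):
    # copy values from `data` only for keys the container already has
    for key in container:
        try:
            container[key] = data[key]
        except KeyError:
            pass
    return container


container = {
    "name": "effectMain",
    "representation": "<old representation id>",
    "loader": "LoadEffects",
}
data = {
    "representation": "<new representation id>",
    "children_names": ["Crop1_loaded"],  # not in container, so ignored
}

print(_update_container_data(container, data))
# {'name': 'effectMain', 'representation': '<new representation id>',
#  'loader': 'LoadEffects'}
```
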
diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py
index fac26da03a..cb7bc14edb 100644
--- a/openpype/hosts/hiero/api/tags.py
+++ b/openpype/hosts/hiero/api/tags.py
@@ -1,3 +1,4 @@
+import json
import re
import os
import hiero
@@ -85,17 +86,16 @@ def update_tag(tag, data):
# get metadata key from data
data_mtd = data.get("metadata", {})
- # due to hiero bug we have to make sure keys which are not existent in
- # data are cleared of value by `None`
- for _mk in mtd.dict().keys():
- if _mk.replace("tag.", "") not in data_mtd.keys():
- mtd.setValue(_mk, str(None))
-
# set all data metadata to tag metadata
- for k, v in data_mtd.items():
+ for _k, _v in data_mtd.items():
+ value = str(_v)
+ if type(_v) == dict:
+ value = json.dumps(_v)
+
+ # set the value
mtd.setValue(
- "tag.{}".format(str(k)),
- str(v)
+ "tag.{}".format(str(_k)),
+ value
)
# set note description of tag
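
A small, hedged illustration of the metadata rule changed above: every value is stored as a string under a `tag.`-prefixed key, and dict values are JSON-encoded first. The `FakeMetadata` class below only stands in for Hiero's tag metadata object and its `setValue()` call; it is not part of the Hiero API.

```python
import json


class FakeMetadata(dict):
    """Stand-in for hiero tag metadata; only mimics setValue()."""
    def setValue(self, key, value):
        self[key] = value


mtd = FakeMetadata()
data_mtd = {
    "family": "effect",
    "json": {"children_names": ["Crop1_loaded"]},
}

for _k, _v in data_mtd.items():
    value = str(_v)
    if isinstance(_v, dict):
        # dicts survive the round trip as JSON text
        value = json.dumps(_v)
    mtd.setValue("tag.{}".format(_k), value)

print(mtd["tag.json"])  # '{"children_names": ["Crop1_loaded"]}'
```
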
diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py
new file mode 100644
index 0000000000..a3fcd63b5b
--- /dev/null
+++ b/openpype/hosts/hiero/plugins/load/load_effects.py
@@ -0,0 +1,308 @@
+import json
+from collections import OrderedDict
+import six
+
+from openpype.client import (
+ get_version_by_id
+)
+
+from openpype.pipeline import (
+ AVALON_CONTAINER_ID,
+ load,
+ legacy_io,
+ get_representation_path
+)
+from openpype.hosts.hiero import api as phiero
+from openpype.lib import Logger
+
+
+class LoadEffects(load.LoaderPlugin):
+ """Loading colorspace soft effect exported from nukestudio"""
+
+ representations = ["effectJson"]
+ families = ["effect"]
+
+ label = "Load Effects"
+ order = 0
+ icon = "cc"
+ color = "white"
+
+ log = Logger.get_logger(__name__)
+
+ def load(self, context, name, namespace, data):
+ """
+ Load soft effects onto a dedicated track of the active sequence
+
+ Arguments:
+ context (dict): context of version
+ name (str): name of the version
+ namespace (str): asset name
+ data (dict): compulsory attribute (not used here)
+
+ Returns:
+ None: the loaded effects are containerised on the active track
+ """
+ active_sequence = phiero.get_current_sequence()
+ active_track = phiero.get_current_track(
+ active_sequence, "Loaded_{}".format(name))
+
+ # get main variables
+ namespace = namespace or context["asset"]["name"]
+ object_name = "{}_{}".format(name, namespace)
+ clip_in = context["asset"]["data"]["clipIn"]
+ clip_out = context["asset"]["data"]["clipOut"]
+
+ data_imprint = {
+ "objectName": object_name,
+ "children_names": []
+ }
+
+ # getting file path
+ file = self.fname.replace("\\", "/")
+
+ if self._shared_loading(
+ file,
+ active_track,
+ clip_in,
+ clip_out,
+ data_imprint
+ ):
+ self.containerise(
+ active_track,
+ name=name,
+ namespace=namespace,
+ object_name=object_name,
+ context=context,
+ loader=self.__class__.__name__,
+ data=data_imprint)
+
+ def _shared_loading(
+ self,
+ file,
+ active_track,
+ clip_in,
+ clip_out,
+ data_imprint,
+ update=False
+ ):
+ # getting data from json file with unicode conversion
+ with open(file, "r") as f:
+ json_f = {self.byteify(key): self.byteify(value)
+ for key, value in json.load(f).items()}
+
+ # get correct order of nodes by positions on track and subtrack
+ nodes_order = self.reorder_nodes(json_f)
+
+ used_subtracks = {
+ stitem.name(): stitem
+ for stitem in phiero.flatten(active_track.subTrackItems())
+ }
+
+ loaded = False
+ for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()):
+ new_name = "{}_loaded".format(ef_name)
+ if new_name not in used_subtracks:
+ effect_track_item = active_track.createEffect(
+ effectType=ef_val["class"],
+ timelineIn=clip_in,
+ timelineOut=clip_out,
+ subTrackIndex=index_order
+
+ )
+ effect_track_item.setName(new_name)
+ else:
+ effect_track_item = used_subtracks[new_name]
+
+ node = effect_track_item.node()
+ for knob_name, knob_value in ef_val["node"].items():
+ if (
+ not knob_value
+ or knob_name == "name"
+ ):
+ continue
+
+ try:
+ # a list longer than 4 values is assumed to be animation;
+ # up to 4 values could be a static RGBA or vector value
+ if isinstance(knob_value, list) and len(knob_value) > 4:
+ node[knob_name].setAnimated()
+ for i, value in enumerate(knob_value):
+ if isinstance(value, list):
+ # list can have vector animation
+ for ci, cv in enumerate(value):
+ node[knob_name].setValueAt(
+ cv,
+ (clip_in + i),
+ ci
+ )
+ else:
+ # list is single values
+ node[knob_name].setValueAt(
+ value,
+ (clip_in + i)
+ )
+ else:
+ node[knob_name].setValue(knob_value)
+ except NameError:
+ self.log.warning("Knob: {} cannot be set".format(
+ knob_name))
+
+ # register all loaded children
+ data_imprint["children_names"].append(new_name)
+
+ # make sure containerisation will happen
+ loaded = True
+
+ return loaded
+
+ def update(self, container, representation):
+ """ Updating previously loaded effects
+ """
+ active_track = container["_item"]
+ file = get_representation_path(representation).replace("\\", "/")
+
+ # get main variables
+ name = container['name']
+ namespace = container['namespace']
+
+ # get timeline in out data
+ project_name = legacy_io.active_project()
+ version_doc = get_version_by_id(project_name, representation["parent"])
+ version_data = version_doc["data"]
+ clip_in = version_data["clipIn"]
+ clip_out = version_data["clipOut"]
+
+ object_name = "{}_{}".format(name, namespace)
+
+ # Disable previously created nodes
+ used_subtracks = {
+ stitem.name(): stitem
+ for stitem in phiero.flatten(active_track.subTrackItems())
+ }
+ container = phiero.get_track_openpype_data(
+ active_track, object_name
+ )
+
+ loaded_subtrack_items = container["children_names"]
+ for loaded_stitem in loaded_subtrack_items:
+ if loaded_stitem not in used_subtracks:
+ continue
+ item_to_remove = used_subtracks.pop(loaded_stitem)
+ # TODO: find a way to erase nodes
+ self.log.debug(
+ "This node needs to be removed: {}".format(item_to_remove))
+
+ data_imprint = {
+ "objectName": object_name,
+ "name": name,
+ "representation": str(representation["_id"]),
+ "children_names": []
+ }
+
+ if self._shared_loading(
+ file,
+ active_track,
+ clip_in,
+ clip_out,
+ data_imprint,
+ update=True
+ ):
+ return phiero.update_container(active_track, data_imprint)
+
+ def reorder_nodes(self, data):
+ new_order = OrderedDict()
+ trackNums = [v["trackIndex"] for k, v in data.items()
+ if isinstance(v, dict)]
+ subTrackNums = [v["subTrackIndex"] for k, v in data.items()
+ if isinstance(v, dict)]
+
+ for trackIndex in range(
+ min(trackNums), max(trackNums) + 1):
+ for subTrackIndex in range(
+ min(subTrackNums), max(subTrackNums) + 1):
+ item = self.get_item(data, trackIndex, subTrackIndex)
+ if item:
+ new_order.update(item)
+ return new_order
+
+ def get_item(self, data, trackIndex, subTrackIndex):
+ return {key: val for key, val in data.items()
+ if isinstance(val, dict)
+ if subTrackIndex == val["subTrackIndex"]
+ if trackIndex == val["trackIndex"]}
+
+ def byteify(self, input):
+ """
+ Convert unicode strings to plain strings, recursing through
+ dictionaries and lists.
+
+ Arguments:
+ input (dict/str): input
+
+ Returns:
+ dict: with fixed values and keys
+
+ """
+
+ if isinstance(input, dict):
+ return {self.byteify(key): self.byteify(value)
+ for key, value in input.items()}
+ elif isinstance(input, list):
+ return [self.byteify(element) for element in input]
+ elif isinstance(input, six.text_type):
+ return str(input)
+ else:
+ return input
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ def remove(self, container):
+ pass
+
+ def containerise(
+ self,
+ track,
+ name,
+ namespace,
+ object_name,
+ context,
+ loader=None,
+ data=None
+ ):
+ """Bundle Hiero's object into an assembly and imprint it with metadata
+
+ Containerisation enables a tracking of version, author and origin
+ for loaded assets.
+
+ Arguments:
+ track (hiero.core.VideoTrack): object to imprint as container
+ name (str): Name of resulting assembly
+ namespace (str): Namespace under which to host container
+ object_name (str): name of container
+ context (dict): Asset information
+ loader (str, optional): Name of node used to produce this
+ container.
+
+ Returns:
+ None: the container metadata is imprinted on the track's OpenPype tag
+
+ """
+
+ data_imprint = {
+ object_name: {
+ "schema": "openpype:container-2.0",
+ "id": AVALON_CONTAINER_ID,
+ "name": str(name),
+ "namespace": str(namespace),
+ "loader": str(loader),
+ "representation": str(context["representation"]["_id"]),
+ }
+ }
+
+ if data:
+ for k, v in data.items():
+ data_imprint[object_name].update({k: v})
+
+ self.log.debug("_ data_imprint: {}".format(data_imprint))
+ phiero.set_track_openpype_tag(track, data_imprint)
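
The ordering logic in `reorder_nodes()`/`get_item()` above can be exercised without Hiero; a standalone sketch with made-up effect entries (the `trackIndex`/`subTrackIndex` keys mirror the exported effect JSON):

```python
from collections import OrderedDict


def get_item(data, track_index, sub_track_index):
    return {key: val for key, val in data.items()
            if isinstance(val, dict)
            and val["trackIndex"] == track_index
            and val["subTrackIndex"] == sub_track_index}


def reorder_nodes(data):
    new_order = OrderedDict()
    track_nums = [v["trackIndex"] for v in data.values()
                  if isinstance(v, dict)]
    sub_track_nums = [v["subTrackIndex"] for v in data.values()
                      if isinstance(v, dict)]
    # walk tracks, then subtracks, so effects end up in stacking order
    for track_index in range(min(track_nums), max(track_nums) + 1):
        for sub_track_index in range(
                min(sub_track_nums), max(sub_track_nums) + 1):
            new_order.update(get_item(data, track_index, sub_track_index))
    return new_order


effects = {
    "Grade2": {"trackIndex": 0, "subTrackIndex": 1, "class": "Grade"},
    "Crop1": {"trackIndex": 0, "subTrackIndex": 0, "class": "Crop"},
}
print(list(reorder_nodes(effects)))  # ['Crop1', 'Grade2']
```
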
diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py
index 8d2ed9a9c2..9489b1c4fb 100644
--- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py
+++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py
@@ -16,6 +16,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
review_track_index = instance.context.data.get("reviewTrackIndex")
item = instance.data["item"]
+ if "audio" in instance.data["family"]:
+ return
+
# frame range
self.handle_start = instance.data["handleStart"]
self.handle_end = instance.data["handleEnd"]
diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py
index 1fc4b1f696..bb02919b35 100644
--- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py
+++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py
@@ -48,7 +48,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
self.log.debug("clip_name: {}".format(clip_name))
# get openpype tag data
- tag_data = phiero.get_track_item_pype_data(track_item)
+ tag_data = phiero.get_trackitem_openpype_data(track_item)
self.log.debug("__ tag_data: {}".format(pformat(tag_data)))
if not tag_data:
diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py
index fddf7ab98d..2663a55f6f 100644
--- a/openpype/hosts/houdini/api/__init__.py
+++ b/openpype/hosts/houdini/api/__init__.py
@@ -1,24 +1,13 @@
from .pipeline import (
- install,
- uninstall,
-
+ HoudiniHost,
ls,
- containerise,
+ containerise
)
from .plugin import (
Creator,
)
-from .workio import (
- open_file,
- save_file,
- current_file,
- has_unsaved_changes,
- file_extensions,
- work_root
-)
-
from .lib import (
lsattr,
lsattrs,
@@ -29,22 +18,13 @@ from .lib import (
__all__ = [
- "install",
- "uninstall",
+ "HoudiniHost",
"ls",
"containerise",
"Creator",
- # Workfiles API
- "open_file",
- "save_file",
- "current_file",
- "has_unsaved_changes",
- "file_extensions",
- "work_root",
-
# Utility functions
"lsattr",
"lsattrs",
@@ -52,7 +32,3 @@ __all__ = [
"maintained_selection"
]
-
-# Backwards API compatibility
-open = open_file
-save = save_file
diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py
index c8a7f92bb9..13f5a62ec3 100644
--- a/openpype/hosts/houdini/api/lib.py
+++ b/openpype/hosts/houdini/api/lib.py
@@ -1,6 +1,10 @@
+# -*- coding: utf-8 -*-
+import sys
+import os
import uuid
import logging
from contextlib import contextmanager
+import json
import six
@@ -8,10 +12,13 @@ from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.context_tools import get_current_project_asset
-
import hou
+
+self = sys.modules[__name__]
+self._parent = None
log = logging.getLogger(__name__)
+JSON_PREFIX = "JSON:::"
def get_asset_fps():
@@ -29,23 +36,18 @@ def set_id(node, unique_id, overwrite=False):
def get_id(node):
- """
- Get the `cbId` attribute of the given node
+ """Get the `cbId` attribute of the given node.
+
Args:
node (hou.Node): the name of the node to retrieve the attribute from
Returns:
- str
+ str: cbId attribute of the node.
"""
- if node is None:
- return
-
- id = node.parm("id")
- if node is None:
- return
- return id
+ if node is not None:
+ return node.parm("id")
def generate_ids(nodes, asset_id=None):
@@ -281,7 +283,7 @@ def render_rop(ropnode):
raise RuntimeError("Render failed: {0}".format(exc))
-def imprint(node, data):
+def imprint(node, data, update=False):
"""Store attributes with value on a node
Depending on the type of attribute it creates the correct parameter
@@ -290,49 +292,76 @@ def imprint(node, data):
http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html
+ Because of an update glitch where existing ParmTemplates on a node
+ cannot be overwritten using `setParmTemplates()` and
+ `parmTuplesInFolder()`, updates are done in a separate pass.
+
Args:
node(hou.Node): node object from Houdini
data(dict): collection of attributes and their value
+ update (bool, optional): Whether imprint should update
+ already existing data or leave it untouched and only
+ add new keys.
Returns:
None
"""
+ if not data:
+ return
+ if not node:
+ self.log.error("Node is not set, calling imprint on invalid data.")
+ return
- parm_group = node.parmTemplateGroup()
+ current_parms = {p.name(): p for p in node.spareParms()}
+ update_parms = []
+ templates = []
- parm_folder = hou.FolderParmTemplate("folder", "Extra")
for key, value in data.items():
if value is None:
continue
- if isinstance(value, float):
- parm = hou.FloatParmTemplate(name=key,
- label=key,
- num_components=1,
- default_value=(value,))
- elif isinstance(value, bool):
- parm = hou.ToggleParmTemplate(name=key,
- label=key,
- default_value=value)
- elif isinstance(value, int):
- parm = hou.IntParmTemplate(name=key,
- label=key,
- num_components=1,
- default_value=(value,))
- elif isinstance(value, six.string_types):
- parm = hou.StringParmTemplate(name=key,
- label=key,
- num_components=1,
- default_value=(value,))
- else:
- raise TypeError("Unsupported type: %r" % type(value))
+ parm = get_template_from_value(key, value)
- parm_folder.addParmTemplate(parm)
+ if key in current_parms:
+ if node.evalParm(key) == data[key]:
+ continue
+ if not update:
+ log.debug(f"{key} already exists on {node}")
+ else:
+ log.debug(f"replacing {key}")
+ update_parms.append(parm)
+ continue
+
+ templates.append(parm)
+
+ parm_group = node.parmTemplateGroup()
+ parm_folder = parm_group.findFolder("Extra")
+
+ # if folder doesn't exist yet, create one and append to it,
+ # else append to existing one
+ if not parm_folder:
+ parm_folder = hou.FolderParmTemplate("folder", "Extra")
+ parm_folder.setParmTemplates(templates)
+ parm_group.append(parm_folder)
+ else:
+ for template in templates:
+ parm_group.appendToFolder(parm_folder, template)
+ # this is needed because the pointer to folder
+ # is for some reason lost every call to `appendToFolder()`
+ parm_folder = parm_group.findFolder("Extra")
- parm_group.append(parm_folder)
node.setParmTemplateGroup(parm_group)
+ # TODO: Updating is done here, by calling probably deprecated functions.
+ # This needs to be addressed in the future.
+ if not update_parms:
+ return
+
+ for parm in update_parms:
+ node.replaceSpareParmTuple(parm.name(), parm)
+
def lsattr(attr, value=None, root="/"):
"""Return nodes that have `attr`
@@ -397,8 +426,22 @@ def read(node):
"""
# `spareParms` returns a tuple of hou.Parm objects
- return {parameter.name(): parameter.eval() for
- parameter in node.spareParms()}
+ data = {}
+ if not node:
+ return data
+ for parameter in node.spareParms():
+ value = parameter.eval()
+ # test if value is json encoded dict
+ if isinstance(value, six.string_types) and \
+ value.startswith(JSON_PREFIX):
+ try:
+ value = json.loads(value[len(JSON_PREFIX):])
+ except json.JSONDecodeError:
+ # not a json
+ pass
+ data[parameter.name()] = value
+
+ return data
@contextmanager
@@ -460,3 +503,89 @@ def reset_framerange():
hou.playbar.setFrameRange(frame_start, frame_end)
hou.playbar.setPlaybackRange(frame_start, frame_end)
hou.setFrame(frame_start)
+
+
+def get_main_window():
+ """Acquire Houdini's main window"""
+ if self._parent is None:
+ self._parent = hou.ui.mainQtWindow()
+ return self._parent
+
+
+def get_template_from_value(key, value):
+ if isinstance(value, float):
+ parm = hou.FloatParmTemplate(name=key,
+ label=key,
+ num_components=1,
+ default_value=(value,))
+ elif isinstance(value, bool):
+ parm = hou.ToggleParmTemplate(name=key,
+ label=key,
+ default_value=value)
+ elif isinstance(value, int):
+ parm = hou.IntParmTemplate(name=key,
+ label=key,
+ num_components=1,
+ default_value=(value,))
+ elif isinstance(value, six.string_types):
+ parm = hou.StringParmTemplate(name=key,
+ label=key,
+ num_components=1,
+ default_value=(value,))
+ elif isinstance(value, (dict, list, tuple)):
+ parm = hou.StringParmTemplate(name=key,
+ label=key,
+ num_components=1,
+ default_value=(
+ JSON_PREFIX + json.dumps(value),))
+ else:
+ raise TypeError("Unsupported type: %r" % type(value))
+
+ return parm
+
+
+def get_frame_data(node):
+ """Get the frame data: start frame, end frame and steps.
+
+ Args:
+ node(hou.Node)
+
+ Returns:
+ dict: frame data for start, end and steps.
+
+ """
+ data = {}
+
+ if node.parm("trange") is None:
+ return data
+
+ if node.evalParm("trange") == 0:
+ self.log.debug("trange is 0")
+ return data
+
+ data["frameStart"] = node.evalParm("f1")
+ data["frameEnd"] = node.evalParm("f2")
+ data["steps"] = node.evalParm("f3")
+
+ return data
+
+
+def splitext(name, allowed_multidot_extensions):
+ # type: (str, list) -> tuple
+ """Split file name to name and extension.
+
+ Args:
+ name (str): File name to split.
+ allowed_multidot_extensions (list of str): List of allowed multidot
+ extensions.
+
+ Returns:
+ tuple: Name and extension.
+ """
+
+ for ext in allowed_multidot_extensions:
+ if name.endswith(ext):
+ return name[:-len(ext)], ext
+
+ return os.path.splitext(name)
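
The new `JSON_PREFIX` convention ties `imprint()` and `read()` together: dict/list/tuple values are stored on string parms as a prefixed JSON string and decoded back on read. A minimal standalone sketch (a plain dict stands in for a node's spare parms; no Houdini required):

```python
import json

JSON_PREFIX = "JSON:::"


def encode_value(value):
    # mirrors get_template_from_value(): complex types become JSON strings
    if isinstance(value, (dict, list, tuple)):
        return JSON_PREFIX + json.dumps(value)
    return value


def decode_value(value):
    # mirrors read(): decode prefixed JSON, otherwise keep the value as-is
    if isinstance(value, str) and value.startswith(JSON_PREFIX):
        try:
            return json.loads(value[len(JSON_PREFIX):])
        except json.JSONDecodeError:
            pass
    return value


stored = {"variant": encode_value("Main"),
          "frame_data": encode_value({"frameStart": 1001, "frameEnd": 1100})}
print(stored["frame_data"])
# JSON:::{"frameStart": 1001, "frameEnd": 1100}
print({key: decode_value(value) for key, value in stored.items()})
# {'variant': 'Main', 'frame_data': {'frameStart': 1001, 'frameEnd': 1100}}
```
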
diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py
index e4af1913ef..b0791fcb6c 100644
--- a/openpype/hosts/houdini/api/pipeline.py
+++ b/openpype/hosts/houdini/api/pipeline.py
@@ -1,9 +1,13 @@
+# -*- coding: utf-8 -*-
+"""Pipeline tools for OpenPype Houdini integration."""
import os
import sys
import logging
import contextlib
-import hou
+import hou # noqa
+
+from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher
import pyblish.api
@@ -26,6 +30,7 @@ from .lib import get_asset_fps
log = logging.getLogger("openpype.hosts.houdini")
AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS"
+CONTEXT_CONTAINER = "/obj/OpenPypeContext"
IS_HEADLESS = not hasattr(hou, "ui")
PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins")
@@ -35,71 +40,139 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
-self = sys.modules[__name__]
-self._has_been_setup = False
-self._parent = None
-self._events = dict()
+class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher):
+ name = "houdini"
+ def __init__(self):
+ super(HoudiniHost, self).__init__()
+ self._op_events = {}
+ self._has_been_setup = False
-def install():
- _register_callbacks()
+ def install(self):
+ pyblish.api.register_host("houdini")
+ pyblish.api.register_host("hython")
+ pyblish.api.register_host("hpython")
- pyblish.api.register_host("houdini")
- pyblish.api.register_host("hython")
- pyblish.api.register_host("hpython")
+ pyblish.api.register_plugin_path(PUBLISH_PATH)
+ register_loader_plugin_path(LOAD_PATH)
+ register_creator_plugin_path(CREATE_PATH)
- pyblish.api.register_plugin_path(PUBLISH_PATH)
- register_loader_plugin_path(LOAD_PATH)
- register_creator_plugin_path(CREATE_PATH)
+ log.info("Installing callbacks ... ")
+ # register_event_callback("init", on_init)
+ self._register_callbacks()
+ register_event_callback("before.save", before_save)
+ register_event_callback("save", on_save)
+ register_event_callback("open", on_open)
+ register_event_callback("new", on_new)
- log.info("Installing callbacks ... ")
- # register_event_callback("init", on_init)
- register_event_callback("before.save", before_save)
- register_event_callback("save", on_save)
- register_event_callback("open", on_open)
- register_event_callback("new", on_new)
+ pyblish.api.register_callback(
+ "instanceToggled", on_pyblish_instance_toggled
+ )
- pyblish.api.register_callback(
- "instanceToggled", on_pyblish_instance_toggled
- )
+ self._has_been_setup = True
+ # add houdini vendor packages
+ hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor")
- self._has_been_setup = True
- # add houdini vendor packages
- hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor")
+ sys.path.append(hou_pythonpath)
- sys.path.append(hou_pythonpath)
+ # Set asset settings for the empty scene directly after launch of
+ # Houdini so it initializes into the correct scene FPS,
+ # Frame Range, etc.
+ # TODO: make sure this doesn't trigger when
+ # opening with last workfile.
+ _set_context_settings()
+ shelves.generate_shelves()
- # Set asset settings for the empty scene directly after launch of Houdini
- # so it initializes into the correct scene FPS, Frame Range, etc.
- # todo: make sure this doesn't trigger when opening with last workfile
- _set_context_settings()
- shelves.generate_shelves()
+ def has_unsaved_changes(self):
+ return hou.hipFile.hasUnsavedChanges()
+ def get_workfile_extensions(self):
+ return [".hip", ".hiplc", ".hipnc"]
-def uninstall():
- """Uninstall Houdini-specific functionality of avalon-core.
+ def save_workfile(self, dst_path=None):
+ # Force forwards slashes to avoid segfault
+ if dst_path:
+ dst_path = dst_path.replace("\\", "/")
+ hou.hipFile.save(file_name=dst_path,
+ save_to_recent_files=True)
+ return dst_path
- This function is called automatically on calling `api.uninstall()`.
- """
+ def open_workfile(self, filepath):
+ # Force forwards slashes to avoid segfault
+ filepath = filepath.replace("\\", "/")
- pyblish.api.deregister_host("hython")
- pyblish.api.deregister_host("hpython")
- pyblish.api.deregister_host("houdini")
+ hou.hipFile.load(filepath,
+ suppress_save_prompt=True,
+ ignore_load_warnings=False)
+ return filepath
-def _register_callbacks():
- for event in self._events.copy().values():
- if event is None:
- continue
+ def get_current_workfile(self):
+ current_filepath = hou.hipFile.path()
+ if (os.path.basename(current_filepath) == "untitled.hip" and
+ not os.path.exists(current_filepath)):
+ # By default a new scene in houdini is saved in the current
+ # working directory as "untitled.hip" so we need to capture
+ # that and consider it 'not saved' when it's in that state.
+ return None
- try:
- hou.hipFile.removeEventCallback(event)
- except RuntimeError as e:
- log.info(e)
+ return current_filepath
- self._events[on_file_event_callback] = hou.hipFile.addEventCallback(
- on_file_event_callback
- )
+ def get_containers(self):
+ return ls()
+
+ def _register_callbacks(self):
+ for event in self._op_events.copy().values():
+ if event is None:
+ continue
+
+ try:
+ hou.hipFile.removeEventCallback(event)
+ except RuntimeError as e:
+ log.info(e)
+
+ self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback(
+ on_file_event_callback
+ )
+
+ @staticmethod
+ def create_context_node():
+ """Helper for creating context holding node.
+
+ Returns:
+ hou.Node: context node
+
+ """
+ obj_network = hou.node("/obj")
+ op_ctx = obj_network.createNode(
+ "null", node_name="OpenPypeContext")
+ op_ctx.moveToGoodPosition()
+ op_ctx.setBuiltExplicitly(False)
+ op_ctx.setCreatorState("OpenPype")
+ op_ctx.setComment("OpenPype node to hold context metadata")
+ op_ctx.setColor(hou.Color((0.081, 0.798, 0.810)))
+ op_ctx.hide(True)
+ return op_ctx
+
+ def update_context_data(self, data, changes):
+ op_ctx = hou.node(CONTEXT_CONTAINER)
+ if not op_ctx:
+ op_ctx = self.create_context_node()
+
+ lib.imprint(op_ctx, data)
+
+ def get_context_data(self):
+ op_ctx = hou.node(CONTEXT_CONTAINER)
+ if not op_ctx:
+ op_ctx = self.create_context_node()
+ return lib.read(op_ctx)
+
+ def save_file(self, dst_path=None):
+ # Force forwards slashes to avoid segfault
+ dst_path = dst_path.replace("\\", "/")
+
+ hou.hipFile.save(file_name=dst_path,
+ save_to_recent_files=True)
def on_file_event_callback(event):
@@ -113,22 +186,6 @@ def on_file_event_callback(event):
emit_event("new")
-def get_main_window():
- """Acquire Houdini's main window"""
- if self._parent is None:
- self._parent = hou.ui.mainQtWindow()
- return self._parent
-
-
-def teardown():
- """Remove integration"""
- if not self._has_been_setup:
- return
-
- self._has_been_setup = False
- print("pyblish: Integration torn down successfully")
-
-
def containerise(name,
namespace,
nodes,
@@ -251,7 +308,7 @@ def on_open():
log.warning("Scene has outdated content.")
# Get main window
- parent = get_main_window()
+ parent = lib.get_main_window()
if parent is None:
log.info("Skipping outdated content pop-up "
"because Houdini window can't be found.")
diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py
index 2bbb65aa05..e15e27c83f 100644
--- a/openpype/hosts/houdini/api/plugin.py
+++ b/openpype/hosts/houdini/api/plugin.py
@@ -1,14 +1,19 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
+from abc import (
+ ABCMeta
+)
import six
-
import hou
from openpype.pipeline import (
CreatorError,
- LegacyCreator
+ LegacyCreator,
+ Creator as NewCreator,
+ CreatedInstance
)
-from .lib import imprint
+from openpype.lib import BoolDef
+from .lib import imprint, read, lsattr
class OpenPypeCreatorError(CreatorError):
@@ -30,12 +35,15 @@ class Creator(LegacyCreator):
when hovering over a node. The information is visible under the name of
the node.
+ Deprecated:
+ This creator is deprecated and will be removed in a future version.
+
"""
defaults = ['Main']
def __init__(self, *args, **kwargs):
super(Creator, self).__init__(*args, **kwargs)
- self.nodes = list()
+ self.nodes = []
def process(self):
"""This is the base functionality to create instances in Houdini
@@ -84,3 +92,187 @@ class Creator(LegacyCreator):
OpenPypeCreatorError,
OpenPypeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2])
+
+
+class HoudiniCreatorBase(object):
+ @staticmethod
+ def cache_subsets(shared_data):
+ """Cache instances for Creators to shared data.
+
+ Create `houdini_cached_subsets` key when needed in shared data and
+ fill it with all collected instances from the scene under their
+ respective creator identifiers.
+
+ If legacy instances are detected in the scene, create
+ `houdini_cached_legacy_subsets` there and fill it with
+ all legacy subsets under family as a key.
+
+ Args:
+ shared_data (Dict[str, Any]): Shared data.
+
+ Return:
+ Dict[str, Any]: Shared data dictionary.
+
+ """
+ if shared_data.get("houdini_cached_subsets") is None:
+ shared_data["houdini_cached_subsets"] = {}
+ if shared_data.get("houdini_cached_legacy_subsets") is None:
+ shared_data["houdini_cached_legacy_subsets"] = {}
+ cached_instances = lsattr("id", "pyblish.avalon.instance")
+ for i in cached_instances:
+ if not i.parm("creator_identifier"):
+ # we have legacy instance
+ family = i.parm("family").eval()
+ if family not in shared_data[
+ "houdini_cached_legacy_subsets"]:
+ shared_data["houdini_cached_legacy_subsets"][
+ family] = [i]
+ else:
+ shared_data[
+ "houdini_cached_legacy_subsets"][family].append(i)
+ continue
+
+ creator_id = i.parm("creator_identifier").eval()
+ if creator_id not in shared_data["houdini_cached_subsets"]:
+ shared_data["houdini_cached_subsets"][creator_id] = [i]
+ else:
+ shared_data[
+ "houdini_cached_subsets"][creator_id].append(i) # noqa
+ return shared_data
+
+ @staticmethod
+ def create_instance_node(
+ node_name, parent,
+ node_type="geometry"):
+ # type: (str, str, str) -> hou.Node
+ """Create node representing instance.
+
+ Arguments:
+ node_name (str): Name of the new node.
+ parent (str): Name of the parent node.
+ node_type (str, optional): Type of the node.
+
+ Returns:
+ hou.Node: Newly created instance node.
+
+ """
+ parent_node = hou.node(parent)
+ instance_node = parent_node.createNode(
+ node_type, node_name=node_name)
+ instance_node.moveToGoodPosition()
+ return instance_node
+
+
+@six.add_metaclass(ABCMeta)
+class HoudiniCreator(NewCreator, HoudiniCreatorBase):
+ """Base class for most of the Houdini creator plugins."""
+ selected_nodes = []
+
+ def create(self, subset_name, instance_data, pre_create_data):
+ try:
+ if pre_create_data.get("use_selection"):
+ self.selected_nodes = hou.selectedNodes()
+
+ # Get the node type and remove it from the data, not needed
+ node_type = instance_data.pop("node_type", None)
+ if node_type is None:
+ node_type = "geometry"
+
+ instance_node = self.create_instance_node(
+ subset_name, "/out", node_type)
+
+ self.customize_node_look(instance_node)
+
+ instance_data["instance_node"] = instance_node.path()
+ instance = CreatedInstance(
+ self.family,
+ subset_name,
+ instance_data,
+ self)
+ self._add_instance_to_context(instance)
+ imprint(instance_node, instance.data_to_store())
+ return instance
+
+ except hou.Error as er:
+ six.reraise(
+ OpenPypeCreatorError,
+ OpenPypeCreatorError("Creator error: {}".format(er)),
+ sys.exc_info()[2])
+
+ def lock_parameters(self, node, parameters):
+ """Lock list of specified parameters on the node.
+
+ Args:
+ node (hou.Node): Houdini node to lock parameters on.
+ parameters (list of str): List of parameter names.
+
+ """
+ for name in parameters:
+ try:
+ parm = node.parm(name)
+ parm.lock(True)
+ except AttributeError:
+ self.log.debug("missing lock pattern {}".format(name))
+
+ def collect_instances(self):
+ # cache instances if missing
+ self.cache_subsets(self.collection_shared_data)
+ for instance in self.collection_shared_data[
+ "houdini_cached_subsets"].get(self.identifier, []):
+ created_instance = CreatedInstance.from_existing(
+ read(instance), self
+ )
+ self._add_instance_to_context(created_instance)
+
+ def update_instances(self, update_list):
+ for created_inst, _changes in update_list:
+ instance_node = hou.node(created_inst.get("instance_node"))
+
+ new_values = {
+ key: new_value
+ for key, (_old_value, new_value) in _changes.items()
+ }
+ imprint(
+ instance_node,
+ new_values,
+ update=True
+ )
+
+ def remove_instances(self, instances):
+ """Remove specified instance from the scene.
+
+ This only removes the `id` parameter so the node is no longer
+ recognized as an instance, because it might contain valuable data
+ for the artist.
+
+ """
+ for instance in instances:
+ instance_node = hou.node(instance.data.get("instance_node"))
+ if instance_node:
+ instance_node.destroy()
+
+ self._remove_instance_from_context(instance)
+
+ def get_pre_create_attr_defs(self):
+ return [
+ BoolDef("use_selection", label="Use selection")
+ ]
+
+ @staticmethod
+ def customize_node_look(
+ node, color=None,
+ shape="chevron_down"):
+ """Set custom look for instance nodes.
+
+ Args:
+ node (hou.Node): Node to set look.
+ color (hou.Color, Optional): Color of the node.
+ shape (str, Optional): Shape name of the node.
+
+ Returns:
+ None
+
+ """
+ if not color:
+ color = hou.Color((0.616, 0.871, 0.769))
+ node.setUserData('nodeshape', shape)
+ node.setColor(color)
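
To illustrate the intended extension pattern, here is a sketch of a new-style creator that mostly just declares its identity and defers to `HoudiniCreator.create()`. The identifier, family, and parm values below are invented for the example, and it assumes a running Houdini session.

```python
import hou  # noqa
from openpype.hosts.houdini.api import plugin


class CreateDemoRop(plugin.HoudiniCreator):
    """Minimal example creator (illustrative only)."""
    identifier = "io.example.creators.houdini.demo"
    label = "Demo (example)"
    family = "demo"
    icon = "gears"

    def create(self, subset_name, instance_data, pre_create_data):
        instance_data.pop("active", None)
        instance_data.update({"node_type": "geometry"})

        instance = super(CreateDemoRop, self).create(
            subset_name, instance_data, pre_create_data)

        # the base class created the ROP node and imprinted the metadata
        instance_node = hou.node(instance.get("instance_node"))
        instance_node.setParms({
            "sopoutput": "$HIP/pyblish/{}.$F4.bgeo".format(subset_name),
        })
        self.lock_parameters(instance_node, ["family", "id"])
        return instance
```
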
diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py
deleted file mode 100644
index 5f7efff333..0000000000
--- a/openpype/hosts/houdini/api/workio.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""Host API required Work Files tool"""
-import os
-
-import hou
-
-
-def file_extensions():
- return [".hip", ".hiplc", ".hipnc"]
-
-
-def has_unsaved_changes():
- return hou.hipFile.hasUnsavedChanges()
-
-
-def save_file(filepath):
-
- # Force forwards slashes to avoid segfault
- filepath = filepath.replace("\\", "/")
-
- hou.hipFile.save(file_name=filepath,
- save_to_recent_files=True)
-
- return filepath
-
-
-def open_file(filepath):
-
- # Force forwards slashes to avoid segfault
- filepath = filepath.replace("\\", "/")
-
- hou.hipFile.load(filepath,
- suppress_save_prompt=True,
- ignore_load_warnings=False)
-
- return filepath
-
-
-def current_file():
-
- current_filepath = hou.hipFile.path()
- if (os.path.basename(current_filepath) == "untitled.hip" and
- not os.path.exists(current_filepath)):
- # By default a new scene in houdini is saved in the current
- # working directory as "untitled.hip" so we need to capture
- # that and consider it 'not saved' when it's in that state.
- return None
-
- return current_filepath
-
-
-def work_root(session):
- work_dir = session["AVALON_WORKDIR"]
- scene_dir = session.get("AVALON_SCENEDIR")
- if scene_dir:
- return os.path.join(work_dir, scene_dir)
- else:
- return work_dir
diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py
new file mode 100644
index 0000000000..4b8041b4f5
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+"""Convertor for legacy Houdini subsets."""
+from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin
+from openpype.hosts.houdini.api.lib import imprint
+
+
+class HoudiniLegacyConvertor(SubsetConvertorPlugin):
+ """Find and convert any legacy subsets in the scene.
+
+ This Convertor will find all legacy subsets in the scene and will
+ transform them to the current system. Since the old subsets don't
+ retain any information about their original creators, the only mapping
+ we can do is based on their families.
+
+ Its limitation is that you can have multiple creators creating subsets
+ of the same family with no way to distinguish between them. This code should
+ nevertheless cover all creators that came with OpenPype.
+
+ """
+ identifier = "io.openpype.creators.houdini.legacy"
+ family_to_id = {
+ "camera": "io.openpype.creators.houdini.camera",
+ "ass": "io.openpype.creators.houdini.ass",
+ "imagesequence": "io.openpype.creators.houdini.imagesequence",
+ "hda": "io.openpype.creators.houdini.hda",
+ "pointcache": "io.openpype.creators.houdini.pointcache",
+ "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy",
+ "redshift_rop": "io.openpype.creators.houdini.redshift_rop",
+ "usd": "io.openpype.creators.houdini.usd",
+ "usdrender": "io.openpype.creators.houdini.usdrender",
+ "vdbcache": "io.openpype.creators.houdini.vdbcache"
+ }
+
+ def __init__(self, *args, **kwargs):
+ super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs)
+ self.legacy_subsets = {}
+
+ def find_instances(self):
+ """Find legacy subsets in the scene.
+
+ Legacy subsets are the ones that don't have the `creator_identifier`
+ parameter on them.
+
+ This uses the cached entries created by
+ :py:meth:`~HoudiniCreatorBase.cache_subsets()`
+
+ """
+ self.legacy_subsets = self.collection_shared_data.get(
+ "houdini_cached_legacy_subsets")
+ if not self.legacy_subsets:
+ return
+ self.add_convertor_item("Found {} incompatible subset{}.".format(
+ len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "")
+ )
+
+ def convert(self):
+ """Convert all legacy subsets to current.
+
+ It is enough to add `creator_identifier` and `instance_node`.
+
+ """
+ if not self.legacy_subsets:
+ return
+
+ for family, subsets in self.legacy_subsets.items():
+ if family in self.family_to_id:
+ for subset in subsets:
+ data = {
+ "creator_identifier": self.family_to_id[family],
+ "instance_node": subset.path()
+ }
+ self.log.info("Converting {} to {}".format(
+ subset.path(), self.family_to_id[family]))
+ imprint(subset, data)
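
What the convertor writes per legacy node is small; a standalone sketch of the mapping step with a made-up node path (in the plugin the nodes are `hou.Node` objects and the data is written with `lib.imprint()`):

```python
family_to_id = {
    "pointcache": "io.openpype.creators.houdini.pointcache",
    "camera": "io.openpype.creators.houdini.camera",
}

# node paths stand in for hou.Node objects returned by lsattr()
legacy_subsets = {"pointcache": ["/out/pointcacheMain"]}

for family, subsets in legacy_subsets.items():
    if family not in family_to_id:
        continue
    for subset_path in subsets:
        data = {
            "creator_identifier": family_to_id[family],
            "instance_node": subset_path,
        }
        # in the plugin this is: imprint(subset, data)
        print("Converting {} to {}".format(
            subset_path, data["creator_identifier"]))
```
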
diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py
index eef86005f5..fec64eb4a1 100644
--- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py
+++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py
@@ -1,46 +1,49 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating alembic camera subsets."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance, CreatorError
-class CreateAlembicCamera(plugin.Creator):
- """Single baked camera from Alembic ROP"""
+class CreateAlembicCamera(plugin.HoudiniCreator):
+ """Single baked camera from Alembic ROP."""
- name = "camera"
+ identifier = "io.openpype.creators.houdini.camera"
label = "Camera (Abc)"
family = "camera"
icon = "camera"
- def __init__(self, *args, **kwargs):
- super(CreateAlembicCamera, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "alembic"})
- # Set node type to create for output
- self.data.update({"node_type": "alembic"})
+ instance = super(CreateAlembicCamera, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- def _process(self, instance):
- """Creator main entry point.
-
- Args:
- instance (hou.Node): Created Houdini instance.
-
- """
+ instance_node = hou.node(instance.get("instance_node"))
parms = {
- "filename": "$HIP/pyblish/%s.abc" % self.name,
+ "filename": hou.text.expandString(
+ "$HIP/pyblish/{}.abc".format(subset_name)),
"use_sop_path": False,
}
- if self.nodes:
- node = self.nodes[0]
- path = node.path()
+ if self.selected_nodes:
+ if len(self.selected_nodes) > 1:
+ raise CreatorError("More than one item selected.")
+ path = self.selected_nodes[0].path()
# Split the node path into the first root and the remainder
# So we can set the root and objects parameters correctly
_, root, remainder = path.split("/", 2)
parms.update({"root": "/" + root, "objects": remainder})
- instance.setParms(parms)
+ instance_node.setParms(parms)
# Lock the Use Sop Path setting so the
# user doesn't accidentally enable it.
- instance.parm("use_sop_path").lock(True)
- instance.parm("trange").set(1)
+ to_lock = ["use_sop_path"]
+ self.lock_parameters(instance_node, to_lock)
+
+ instance_node.parm("trange").set(1)
diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
index 72088e43b0..8b310753d0 100644
--- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
+++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
@@ -1,9 +1,12 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating Arnold ASS files."""
from openpype.hosts.houdini.api import plugin
-class CreateArnoldAss(plugin.Creator):
+class CreateArnoldAss(plugin.HoudiniCreator):
"""Arnold .ass Archive"""
+ identifier = "io.openpype.creators.houdini.ass"
label = "Arnold ASS"
family = "ass"
icon = "magic"
@@ -12,42 +15,39 @@ class CreateArnoldAss(plugin.Creator):
# Default extension: `.ass` or `.ass.gz`
ext = ".ass"
- def __init__(self, *args, **kwargs):
- super(CreateArnoldAss, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "arnold"})
- self.data.update({"node_type": "arnold"})
+ instance = super(CreateArnoldAss, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: plugin.CreatedInstance
- def process(self):
- node = super(CreateArnoldAss, self).process()
-
- basename = node.name()
- node.setName(basename + "_ASS", unique_name=True)
+ instance_node = hou.node(instance.get("instance_node"))
# Hide Properties Tab on Arnold ROP since that's used
# for rendering instead of .ass Archive Export
- parm_template_group = node.parmTemplateGroup()
+ parm_template_group = instance_node.parmTemplateGroup()
parm_template_group.hideFolder("Properties", True)
- node.setParmTemplateGroup(parm_template_group)
+ instance_node.setParmTemplateGroup(parm_template_group)
- filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext)
+ filepath = "{}{}".format(
+ hou.text.expandString("$HIP/pyblish/"),
+ "{}.$F4{}".format(subset_name, self.ext)
+ )
parms = {
# Render frame range
"trange": 1,
-
# Arnold ROP settings
"ar_ass_file": filepath,
"ar_ass_export_enable": 1
}
- node.setParms(parms)
- # Lock the ASS export attribute
- node.parm("ar_ass_export_enable").lock(True)
+ instance_node.setParms(parms)
- # Lock some Avalon attributes
- to_lock = ["family", "id"]
- for name in to_lock:
- parm = node.parm(name)
- parm.lock(True)
+ # Lock any parameters in this list
+ to_lock = ["ar_ass_export_enable", "family", "id"]
+ self.lock_parameters(instance_node, to_lock)
diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py
index e278708076..45af2b0630 100644
--- a/openpype/hosts/houdini/plugins/create/create_composite.py
+++ b/openpype/hosts/houdini/plugins/create/create_composite.py
@@ -1,44 +1,42 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating composite sequences."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreateCompositeSequence(plugin.Creator):
+class CreateCompositeSequence(plugin.HoudiniCreator):
"""Composite ROP to Image Sequence"""
+ identifier = "io.openpype.creators.houdini.imagesequence"
label = "Composite (Image Sequence)"
family = "imagesequence"
icon = "gears"
- def __init__(self, *args, **kwargs):
- super(CreateCompositeSequence, self).__init__(*args, **kwargs)
+ ext = ".exr"
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou # noqa
- # Type of ROP node to create
- self.data.update({"node_type": "comp"})
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "comp"})
- def _process(self, instance):
- """Creator main entry point.
+ instance = super(CreateCompositeSequence, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- Args:
- instance (hou.Node): Created Houdini instance.
+ instance_node = hou.node(instance.get("instance_node"))
+ filepath = "{}{}".format(
+ hou.text.expandString("$HIP/pyblish/"),
+ "{}.$F4{}".format(subset_name, self.ext)
+ )
+ parms = {
+ "trange": 1,
+ "copoutput": filepath
+ }
- """
- parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name}
-
- if self.nodes:
- node = self.nodes[0]
- parms.update({"coppath": node.path()})
-
- instance.setParms(parms)
+ instance_node.setParms(parms)
# Lock any parameters in this list
to_lock = ["prim_to_detail_pattern"]
- for name in to_lock:
- try:
- parm = instance.parm(name)
- parm.lock(True)
- except AttributeError:
- # missing lock pattern
- self.log.debug(
- "missing lock pattern {}".format(name))
+ self.lock_parameters(instance_node, to_lock)
diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py
index b98da8b8bb..4bed83c2e9 100644
--- a/openpype/hosts/houdini/plugins/create/create_hda.py
+++ b/openpype/hosts/houdini/plugins/create/create_hda.py
@@ -1,28 +1,22 @@
# -*- coding: utf-8 -*-
-import hou
-
+"""Creator plugin for creating publishable Houdini Digital Assets."""
from openpype.client import (
get_asset_by_name,
get_subsets,
)
from openpype.pipeline import legacy_io
-from openpype.hosts.houdini.api import lib
from openpype.hosts.houdini.api import plugin
-class CreateHDA(plugin.Creator):
+class CreateHDA(plugin.HoudiniCreator):
"""Publish Houdini Digital Asset file."""
- name = "hda"
+ identifier = "io.openpype.creators.houdini.hda"
label = "Houdini Digital Asset (Hda)"
family = "hda"
icon = "gears"
maintain_selection = False
- def __init__(self, *args, **kwargs):
- super(CreateHDA, self).__init__(*args, **kwargs)
- self.data.pop("active", None)
-
def _check_existing(self, subset_name):
# type: (str) -> bool
"""Check if existing subset name versions already exists."""
@@ -40,55 +34,51 @@ class CreateHDA(plugin.Creator):
}
return subset_name.lower() in existing_subset_names_low
- def _process(self, instance):
- subset_name = self.data["subset"]
- # get selected nodes
- out = hou.node("/obj")
- self.nodes = hou.selectedNodes()
+ def _create_instance_node(
+ self, node_name, parent, node_type="geometry"):
+ import hou
- if (self.options or {}).get("useSelection") and self.nodes:
- # if we have `use selection` enabled and we have some
+ parent_node = hou.node("/obj")
+ if self.selected_nodes:
+ # if we have `use selection` enabled, and we have some
# selected nodes ...
- subnet = out.collapseIntoSubnet(
- self.nodes,
- subnet_name="{}_subnet".format(self.name))
+ subnet = parent_node.collapseIntoSubnet(
+ self.selected_nodes,
+ subnet_name="{}_subnet".format(node_name))
subnet.moveToGoodPosition()
to_hda = subnet
else:
- to_hda = out.createNode(
- "subnet", node_name="{}_subnet".format(self.name))
+ to_hda = parent_node.createNode(
+ "subnet", node_name="{}_subnet".format(node_name))
if not to_hda.type().definition():
# if node type has not its definition, it is not user
# created hda. We test if hda can be created from the node.
if not to_hda.canCreateDigitalAsset():
- raise Exception(
+ raise plugin.OpenPypeCreatorError(
"cannot create hda from node {}".format(to_hda))
hda_node = to_hda.createDigitalAsset(
- name=subset_name,
- hda_file_name="$HIP/{}.hda".format(subset_name)
+ name=node_name,
+ hda_file_name="$HIP/{}.hda".format(node_name)
)
hda_node.layoutChildren()
- elif self._check_existing(subset_name):
+ elif self._check_existing(node_name):
raise plugin.OpenPypeCreatorError(
("subset {} is already published with different HDA"
- "definition.").format(subset_name))
+ "definition.").format(node_name))
else:
hda_node = to_hda
- hda_node.setName(subset_name)
-
- # delete node created by Avalon in /out
- # this needs to be addressed in future Houdini workflow refactor.
-
- hou.node("/out/{}".format(subset_name)).destroy()
-
- try:
- lib.imprint(hda_node, self.data)
- except hou.OperationFailed:
- raise plugin.OpenPypeCreatorError(
- ("Cannot set metadata on asset. Might be that it already is "
- "OpenPype asset.")
- )
-
+ hda_node.setName(node_name)
+ self.customize_node_look(hda_node)
return hda_node
+
+ def create(self, subset_name, instance_data, pre_create_data):
+ instance_data.pop("active", None)
+
+ instance = super(CreateHDA, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: plugin.CreatedInstance
+
+ return instance
diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py
index feb683edf6..6b6b277422 100644
--- a/openpype/hosts/houdini/plugins/create/create_pointcache.py
+++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py
@@ -1,48 +1,51 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating pointcache alembics."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreatePointCache(plugin.Creator):
+class CreatePointCache(plugin.HoudiniCreator):
"""Alembic ROP to pointcache"""
-
- name = "pointcache"
+ identifier = "io.openpype.creators.houdini.pointcache"
label = "Point Cache"
family = "pointcache"
icon = "gears"
- def __init__(self, *args, **kwargs):
- super(CreatePointCache, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "alembic"})
- self.data.update({"node_type": "alembic"})
+ instance = super(CreatePointCache, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- def _process(self, instance):
- """Creator main entry point.
-
- Args:
- instance (hou.Node): Created Houdini instance.
-
- """
+ instance_node = hou.node(instance.get("instance_node"))
parms = {
- "use_sop_path": True, # Export single node from SOP Path
- "build_from_path": True, # Direct path of primitive in output
- "path_attrib": "path", # Pass path attribute for output
+ "use_sop_path": True,
+ "build_from_path": True,
+ "path_attrib": "path",
"prim_to_detail_pattern": "cbId",
- "format": 2, # Set format to Ogawa
- "facesets": 0, # No face sets (by default exclude them)
- "filename": "$HIP/pyblish/%s.abc" % self.name,
+ "format": 2,
+ "facesets": 0,
+ "filename": hou.text.expandString(
+ "$HIP/pyblish/{}.abc".format(subset_name))
}
- if self.nodes:
- node = self.nodes[0]
- parms.update({"sop_path": node.path()})
+ if self.selected_nodes:
+ parms["sop_path"] = self.selected_nodes[0].path()
- instance.setParms(parms)
- instance.parm("trange").set(1)
+ # try to find output node
+ for child in self.selected_nodes[0].children():
+ if child.type().name() == "output":
+ parms["sop_path"] = child.path()
+ break
+
+ instance_node.setParms(parms)
+ instance_node.parm("trange").set(1)
# Lock any parameters in this list
to_lock = ["prim_to_detail_pattern"]
- for name in to_lock:
- parm = instance.parm(name)
- parm.lock(True)
+ self.lock_parameters(instance_node, to_lock)
diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py
index da4d80bf2b..8b6a68437b 100644
--- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py
+++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py
@@ -1,18 +1,20 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating Redshift proxies."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreateRedshiftProxy(plugin.Creator):
+class CreateRedshiftProxy(plugin.HoudiniCreator):
"""Redshift Proxy"""
-
+ identifier = "io.openpype.creators.houdini.redshiftproxy"
label = "Redshift Proxy"
family = "redshiftproxy"
icon = "magic"
- def __init__(self, *args, **kwargs):
- super(CreateRedshiftProxy, self).__init__(*args, **kwargs)
-
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou # noqa
# Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
# Redshift provides a `Redshift_Proxy_Output` node type which shows
# a limited set of parameters by default and is set to extract a
@@ -21,28 +23,24 @@ class CreateRedshiftProxy(plugin.Creator):
# why this happens.
# TODO: Somehow enforce so that it only shows the original limited
# attributes of the Redshift_Proxy_Output node type
- self.data.update({"node_type": "Redshift_Proxy_Output"})
+ instance_data.update({"node_type": "Redshift_Proxy_Output"})
- def _process(self, instance):
- """Creator main entry point.
+ instance = super(CreateRedshiftProxy, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- Args:
- instance (hou.Node): Created Houdini instance.
+ instance_node = hou.node(instance.get("instance_node"))
- """
parms = {
- "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs',
+ "RS_archive_file": '$HIP/pyblish/`{}.$F4.rs'.format(subset_name),
}
- if self.nodes:
- node = self.nodes[0]
- path = node.path()
- parms["RS_archive_sopPath"] = path
+ if self.selected_nodes:
+ parms["RS_archive_sopPath"] = self.selected_nodes[0].path()
- instance.setParms(parms)
+ instance_node.setParms(parms)
# Lock some Avalon attributes
- to_lock = ["family", "id"]
- for name in to_lock:
- parm = instance.parm(name)
- parm.lock(True)
+ to_lock = ["family", "id", "prim_to_detail_pattern"]
+ self.lock_parameters(instance_node, to_lock)
diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
index 6949ca169b..2cbe9bfda1 100644
--- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
@@ -1,41 +1,40 @@
-import hou
+# -*- coding: utf-8 -*-
+"""Creator plugin to create Redshift ROP."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreateRedshiftROP(plugin.Creator):
+class CreateRedshiftROP(plugin.HoudiniCreator):
"""Redshift ROP"""
-
+ identifier = "io.openpype.creators.houdini.redshift_rop"
label = "Redshift ROP"
family = "redshift_rop"
icon = "magic"
defaults = ["master"]
- def __init__(self, *args, **kwargs):
- super(CreateRedshiftROP, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou # noqa
+
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "Redshift_ROP"})
+ # Add chunk size attribute
+ instance_data["chunkSize"] = 10
# Clear the family prefix from the subset
- subset = self.data["subset"]
+ subset = subset_name
subset_no_prefix = subset[len(self.family):]
subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
- self.data["subset"] = subset_no_prefix
+ subset_name = subset_no_prefix
- # Add chunk size attribute
- self.data["chunkSize"] = 10
+ instance = super(CreateRedshiftROP, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_node = hou.node(instance.get("instance_node"))
- self.data.update({"node_type": "Redshift_ROP"})
-
- def _process(self, instance):
- """Creator main entry point.
-
- Args:
- instance (hou.Node): Created Houdini instance.
-
- """
- basename = instance.name()
- instance.setName(basename + "_ROP", unique_name=True)
+ basename = instance_node.name()
+ instance_node.setName(basename + "_ROP", unique_name=True)
# Also create the linked Redshift IPR Rop
try:
@@ -43,11 +42,12 @@ class CreateRedshiftROP(plugin.Creator):
"Redshift_IPR", node_name=basename + "_IPR"
)
except hou.OperationFailed:
- raise Exception(("Cannot create Redshift node. Is Redshift "
- "installed and enabled?"))
+ raise plugin.OpenPypeCreatorError(
+ ("Cannot create Redshift node. Is Redshift "
+ "installed and enabled?"))
# Move it to directly under the Redshift ROP
- ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1))
+ ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1))
# Set the linked rop to the Redshift ROP
ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance))
@@ -61,10 +61,8 @@ class CreateRedshiftROP(plugin.Creator):
"RS_outputMultilayerMode": 0, # no multi-layered exr
"RS_outputBeautyAOVSuffix": "beauty",
}
- instance.setParms(parms)
+ instance_node.setParms(parms)
# Lock some Avalon attributes
to_lock = ["family", "id"]
- for name in to_lock:
- parm = instance.parm(name)
- parm.lock(True)
+ self.lock_parameters(instance_node, to_lock)
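The Redshift ROP creator strips the family prefix from the generated subset name and lower-cases the first remaining character before calling the base `create`. A standalone illustration of that string handling:

```python
# Standalone illustration of the subset-name handling above,
# e.g. "redshift_ropMain" with family "redshift_rop" becomes "main".
def strip_family_prefix(subset_name, family):
    without_prefix = subset_name[len(family):]
    return without_prefix[0].lower() + without_prefix[1:]


print(strip_family_prefix("redshift_ropMain", "redshift_rop"))  # -> "main"
```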
diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py
index 5bcb7840c0..51ed8237c5 100644
--- a/openpype/hosts/houdini/plugins/create/create_usd.py
+++ b/openpype/hosts/houdini/plugins/create/create_usd.py
@@ -1,39 +1,39 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating USDs."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreateUSD(plugin.Creator):
+class CreateUSD(plugin.HoudiniCreator):
"""Universal Scene Description"""
-
+ identifier = "io.openpype.creators.houdini.usd"
label = "USD (experimental)"
family = "usd"
icon = "gears"
enabled = False
- def __init__(self, *args, **kwargs):
- super(CreateUSD, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou # noqa
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "usd"})
- self.data.update({"node_type": "usd"})
+ instance = super(CreateUSD, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- def _process(self, instance):
- """Creator main entry point.
+ instance_node = hou.node(instance.get("instance_node"))
- Args:
- instance (hou.Node): Created Houdini instance.
-
- """
parms = {
- "lopoutput": "$HIP/pyblish/%s.usd" % self.name,
+ "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name),
"enableoutputprocessor_simplerelativepaths": False,
}
- if self.nodes:
- node = self.nodes[0]
- parms.update({"loppath": node.path()})
+ if self.selected_nodes:
+ parms["loppath"] = self.selected_nodes[0].path()
- instance.setParms(parms)
+ instance_node.setParms(parms)
# Lock any parameters in this list
to_lock = [
@@ -42,6 +42,4 @@ class CreateUSD(plugin.Creator):
"family",
"id",
]
- for name in to_lock:
- parm = instance.parm(name)
- parm.lock(True)
+ self.lock_parameters(instance_node, to_lock)
diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py
index cb3fe3f02b..f78f0bed50 100644
--- a/openpype/hosts/houdini/plugins/create/create_usdrender.py
+++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py
@@ -1,42 +1,41 @@
-import hou
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating USD renders."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreateUSDRender(plugin.Creator):
+class CreateUSDRender(plugin.HoudiniCreator):
"""USD Render ROP in /stage"""
-
+ identifier = "io.openpype.creators.houdini.usdrender"
label = "USD Render (experimental)"
family = "usdrender"
icon = "magic"
- def __init__(self, *args, **kwargs):
- super(CreateUSDRender, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou # noqa
- self.parent = hou.node("/stage")
+ instance_data["parent"] = hou.node("/stage")
# Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "usdrender"})
- self.data.update({"node_type": "usdrender"})
+ instance = super(CreateUSDRender, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- def _process(self, instance):
- """Creator main entry point.
+ instance_node = hou.node(instance.get("instance_node"))
- Args:
- instance (hou.Node): Created Houdini instance.
- """
parms = {
# Render frame range
"trange": 1
}
- if self.nodes:
- node = self.nodes[0]
- parms.update({"loppath": node.path()})
- instance.setParms(parms)
+ if self.selected_nodes:
+ parms["loppath"] = self.selected_nodes[0].path()
+ instance_node.setParms(parms)
# Lock some Avalon attributes
to_lock = ["family", "id"]
- for name in to_lock:
- parm = instance.parm(name)
- parm.lock(True)
+ self.lock_parameters(instance_node, to_lock)
diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
index 242c21fc72..1a5011745f 100644
--- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
+++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
@@ -1,38 +1,36 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating VDB Caches."""
from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
-class CreateVDBCache(plugin.Creator):
+class CreateVDBCache(plugin.HoudiniCreator):
"""OpenVDB from Geometry ROP"""
-
+ identifier = "io.openpype.creators.houdini.vdbcache"
name = "vbdcache"
label = "VDB Cache"
family = "vdbcache"
icon = "cloud"
- def __init__(self, *args, **kwargs):
- super(CreateVDBCache, self).__init__(*args, **kwargs)
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou
- # Remove the active, we are checking the bypass flag of the nodes
- self.data.pop("active", None)
+ instance_data.pop("active", None)
+ instance_data.update({"node_type": "geometry"})
- # Set node type to create for output
- self.data["node_type"] = "geometry"
+ instance = super(CreateVDBCache, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
- def _process(self, instance):
- """Creator main entry point.
-
- Args:
- instance (hou.Node): Created Houdini instance.
-
- """
+ instance_node = hou.node(instance.get("instance_node"))
parms = {
- "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name,
+ "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name),
"initsim": True,
"trange": 1
}
- if self.nodes:
- node = self.nodes[0]
- parms.update({"soppath": node.path()})
+ if self.selected_nodes:
+ parms["soppath"] = self.selected_nodes[0].path()
- instance.setParms(parms)
+ instance_node.setParms(parms)
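All of the converted cache creators follow the same flow: pop `active` (the node's bypass flag drives that state), set `node_type`, call the base `create`, then resolve the created ROP with `hou.node()` and set its parameters. A hedged sketch of that shared pattern with a made-up creator (identifier, family and parms are illustrative, not part of this changeset):

```python
# Sketch only: a hypothetical creator following the pattern used above.
import hou
from openpype.hosts.houdini.api import plugin


class CreateExampleCache(plugin.HoudiniCreator):
    identifier = "io.openpype.creators.houdini.examplecache"  # hypothetical
    label = "Example Cache"
    family = "examplecache"
    icon = "cube"

    def create(self, subset_name, instance_data, pre_create_data):
        # Activeness is read from the node's bypass flag, not "active".
        instance_data.pop("active", None)
        instance_data["node_type"] = "geometry"

        instance = super(CreateExampleCache, self).create(
            subset_name, instance_data, pre_create_data)

        # The base class stores the created ROP path on the instance.
        instance_node = hou.node(instance.get("instance_node"))
        parms = {"sopoutput": "$HIP/pyblish/{}.$F4.bgeo".format(subset_name)}
        if self.selected_nodes:
            parms["soppath"] = self.selected_nodes[0].path()
        instance_node.setParms(parms)
```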
diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py
new file mode 100644
index 0000000000..0c6d840810
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/create/create_workfile.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating workfiles."""
+from openpype.hosts.houdini.api import plugin
+from openpype.hosts.houdini.api.lib import read, imprint
+from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER
+from openpype.pipeline import CreatedInstance, AutoCreator
+from openpype.pipeline import legacy_io
+from openpype.client import get_asset_by_name
+import hou
+
+
+class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator):
+ """Workfile auto-creator."""
+ identifier = "io.openpype.creators.houdini.workfile"
+ label = "Workfile"
+ family = "workfile"
+ icon = "document"
+
+ default_variant = "Main"
+
+ def create(self):
+ variant = self.default_variant
+ current_instance = next(
+ (
+ instance for instance in self.create_context.instances
+ if instance.creator_identifier == self.identifier
+ ), None)
+
+ project_name = self.project_name
+ asset_name = legacy_io.Session["AVALON_ASSET"]
+ task_name = legacy_io.Session["AVALON_TASK"]
+ host_name = legacy_io.Session["AVALON_APP"]
+
+ if current_instance is None:
+ asset_doc = get_asset_by_name(project_name, asset_name)
+ subset_name = self.get_subset_name(
+ variant, task_name, asset_doc, project_name, host_name
+ )
+ data = {
+ "asset": asset_name,
+ "task": task_name,
+ "variant": variant
+ }
+ data.update(
+ self.get_dynamic_data(
+ variant, task_name, asset_doc,
+ project_name, host_name, current_instance)
+ )
+ self.log.info("Auto-creating workfile instance...")
+ current_instance = CreatedInstance(
+ self.family, subset_name, data, self
+ )
+ self._add_instance_to_context(current_instance)
+ elif (
+ current_instance["asset"] != asset_name
+ or current_instance["task"] != task_name
+ ):
+ # Update instance context if is not the same
+ asset_doc = get_asset_by_name(project_name, asset_name)
+ subset_name = self.get_subset_name(
+ variant, task_name, asset_doc, project_name, host_name
+ )
+ current_instance["asset"] = asset_name
+ current_instance["task"] = task_name
+ current_instance["subset"] = subset_name
+
+ # write workfile information to context container.
+ op_ctx = hou.node(CONTEXT_CONTAINER)
+ if not op_ctx:
+ op_ctx = self.create_context_node()
+
+ workfile_data = {"workfile": current_instance.data_to_store()}
+ imprint(op_ctx, workfile_data)
+
+ def collect_instances(self):
+ op_ctx = hou.node(CONTEXT_CONTAINER)
+ instance = read(op_ctx)
+ if not instance:
+ return
+ workfile = instance.get("workfile")
+ if not workfile:
+ return
+ created_instance = CreatedInstance.from_existing(
+ workfile, self
+ )
+ self._add_instance_to_context(created_instance)
+
+ def update_instances(self, update_list):
+ op_ctx = hou.node(CONTEXT_CONTAINER)
+ for created_inst, _changes in update_list:
+ if created_inst["creator_identifier"] == self.identifier:
+ workfile_data = {"workfile": created_inst.data_to_store()}
+ imprint(op_ctx, workfile_data, update=True)
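The workfile auto-creator persists its instance data as parameters on the OpenPype context node and reads it back on collection. A minimal sketch of that round trip, assuming a running Houdini session with the context node present:

```python
# Minimal sketch of the imprint/read round trip used by CreateWorkfile.
import hou
from openpype.hosts.houdini.api.lib import read, imprint
from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER

op_ctx = hou.node(CONTEXT_CONTAINER)
if op_ctx is not None:
    # Store publish data on the context node (values are illustrative) ...
    imprint(op_ctx, {"workfile": {"variant": "Main"}})
    # ... and read it back, as collect_instances() does on the next publish.
    stored = read(op_ctx)
    print(stored.get("workfile"))
```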
diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py
index 862d5720e1..cc3f2e7fae 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py
@@ -1,4 +1,5 @@
import pyblish.api
+import hou
class CollectInstanceActiveState(pyblish.api.InstancePlugin):
@@ -24,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):
# Check bypass state and reverse
active = True
- node = instance[0]
+ node = hou.node(instance.get("instance_node"))
if hasattr(node, "isBypassed"):
active = not node.isBypassed()
diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py
index 1383c274a2..9cca07fdc7 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py
@@ -5,19 +5,20 @@ from openpype.pipeline import legacy_io
import pyblish.api
-class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
+class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.01
label = "Houdini Current File"
hosts = ["houdini"]
+ family = ["workfile"]
- def process(self, context):
+ def process(self, instance):
"""Inject the current working file"""
current_file = hou.hipFile.path()
if not os.path.exists(current_file):
- # By default Houdini will even point a new scene to a path.
+ # By default, Houdini will even point a new scene to a path.
# However if the file is not saved at all and does not exist,
# we assume the user never set it.
filepath = ""
@@ -34,43 +35,26 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
"saved correctly."
)
- context.data["currentFile"] = current_file
+ instance.context.data["currentFile"] = current_file
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)
- task = legacy_io.Session["AVALON_TASK"]
-
- data = {}
-
- # create instance
- instance = context.create_instance(name=filename)
- subset = 'workfile' + task.capitalize()
-
- data.update({
- "subset": subset,
- "asset": os.getenv("AVALON_ASSET", None),
- "label": subset,
- "publish": True,
- "family": 'workfile',
- "families": ['workfile'],
+ instance.data.update({
"setMembers": [current_file],
- "frameStart": context.data['frameStart'],
- "frameEnd": context.data['frameEnd'],
- "handleStart": context.data['handleStart'],
- "handleEnd": context.data['handleEnd']
+ "frameStart": instance.context.data['frameStart'],
+ "frameEnd": instance.context.data['frameEnd'],
+ "handleStart": instance.context.data['handleStart'],
+ "handleEnd": instance.context.data['handleEnd']
})
- data['representations'] = [{
+ instance.data['representations'] = [{
'name': ext.lstrip("."),
'ext': ext.lstrip("."),
'files': file,
"stagingDir": folder,
}]
- instance.data.update(data)
-
self.log.info('Collected instance: {}'.format(file))
self.log.info('Scene path: {}'.format(current_file))
self.log.info('staging Dir: {}'.format(folder))
- self.log.info('subset: {}'.format(subset))
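The collector now runs per workfile instance and only attaches the scene file as a single-file representation instead of creating its own instance. The shape of that payload, with made-up paths:

```python
# Illustrative shape of the workfile representation built above
# (the .hip path is made up).
import os

current_file = "/projects/demo/work/houdini/shot010_lighting_v003.hip"
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)

representation = {
    "name": ext.lstrip("."),  # "hip"
    "ext": ext.lstrip("."),   # "hip"
    "files": file,            # a single file, not a list
    "stagingDir": folder,
}
print(representation)
```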
diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py
index 9bd43d8a09..531cdf1249 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_frames.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py
@@ -1,19 +1,13 @@
+# -*- coding: utf-8 -*-
+"""Collector plugin for frames data on ROP instances."""
import os
import re
-import hou
+import hou # noqa
import pyblish.api
from openpype.hosts.houdini.api import lib
-def splitext(name, allowed_multidot_extensions):
-
- for ext in allowed_multidot_extensions:
- if name.endswith(ext):
- return name[:-len(ext)], ext
-
- return os.path.splitext(name)
-
class CollectFrames(pyblish.api.InstancePlugin):
"""Collect all frames which would be saved from the ROP nodes"""
@@ -24,7 +18,9 @@ class CollectFrames(pyblish.api.InstancePlugin):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data["instance_node"])
+ frame_data = lib.get_frame_data(ropnode)
+ instance.data.update(frame_data)
start_frame = instance.data.get("frameStart", None)
end_frame = instance.data.get("frameEnd", None)
@@ -38,13 +34,13 @@ class CollectFrames(pyblish.api.InstancePlugin):
self.log.warning("Using current frame: {}".format(hou.frame()))
output = output_parm.eval()
- _, ext = splitext(output,
+ _, ext = lib.splitext(output,
allowed_multidot_extensions=[".ass.gz"])
file_name = os.path.basename(output)
result = file_name
# Get the filename pattern match from the output
- # path so we can compute all frames that would
+ # path, so we can compute all frames that would
# come out from rendering the ROP node if there
# is a frame pattern in the name
pattern = r"\w+\.(\d+)" + re.escape(ext)
@@ -63,8 +59,9 @@ class CollectFrames(pyblish.api.InstancePlugin):
# for a custom frame list. So this should be refactored.
instance.data.update({"frames": result})
- def create_file_list(self, match, start_frame, end_frame):
- """Collect files based on frame range and regex.match
+ @staticmethod
+ def create_file_list(match, start_frame, end_frame):
+ """Collect files based on frame range and `regex.match`
Args:
match(re.match): match object
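`create_file_list` becomes a staticmethod that expands the matched frame pattern over the collected frame range. The plugin's own implementation is not shown in full here; the sketch below is an illustrative reimplementation of that expansion under the same frame-numbering assumption:

```python
# Illustrative frame expansion similar to create_file_list(); padding and
# pattern handling in the real plugin may differ.
import re


def expand_frames(file_name, start_frame, end_frame):
    match = re.match(r"(\w+\.)(\d+)(\.\w+.*)$", file_name)
    if not match:
        return [file_name]
    head, frame, tail = match.groups()
    padding = len(frame)
    return [
        "{}{}{}".format(head, str(frame_number).zfill(padding), tail)
        for frame_number in range(int(start_frame), int(end_frame) + 1)
    ]


print(expand_frames("subsetMain.1001.vdb", 1001, 1003))
# ['subsetMain.1001.vdb', 'subsetMain.1002.vdb', 'subsetMain.1003.vdb']
```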
diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py
index d38927984a..bb85630552 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_instances.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py
@@ -47,6 +47,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
if node.evalParm("id") != "pyblish.avalon.instance":
continue
+ # instance was created by new creator code, skip it as
+ # it is already collected.
+ if node.parm("creator_identifier"):
+ continue
+
has_family = node.evalParm("family")
assert has_family, "'%s' is missing 'family'" % node.name()
@@ -58,7 +63,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
data.update({"active": not node.isBypassed()})
# temporarily translation of `active` to `publish` till issue has
- # been resolved, https://github.com/pyblish/pyblish-base/issues/307
+ # been resolved.
+ # https://github.com/pyblish/pyblish-base/issues/307
if "active" in data:
data["publish"] = data["active"]
@@ -78,6 +84,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
instance.data["families"] = [instance.data["family"]]
instance[:] = [node]
+ instance.data["instance_node"] = node.path()
instance.data.update(data)
def sort_by_family(instance):
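The legacy collector now skips any node that carries a `creator_identifier` parameter, so instances made by the new creators are not collected twice. A tiny sketch of that guard (the node path is hypothetical):

```python
# Sketch of the coexistence guard added above; "/out/pointcacheMain"
# is a made-up node path.
import hou

node = hou.node("/out/pointcacheMain")
if node is not None and node.evalParm("id") == "pyblish.avalon.instance":
    if node.parm("creator_identifier"):
        pass  # created by the new publisher; its creator collects it
    else:
        print("Legacy instance node:", node.path())
```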
diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py
index 0130c0a8da..601ed17b39 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py
@@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
import hou
- node = instance[0]
+ node = hou.node(instance.data["instance_node"])
# Get sop path
node_type = node.type().name()
diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py
index 72b554b567..346bdf3421 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py
@@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
def process(self, instance):
- rop = instance[0]
+ rop = hou.node(instance.data.get("instance_node"))
# Collect chunkSize
chunk_size_parm = rop.parm("chunkSize")
diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py
index d7163b43c0..fcd80e0082 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py
@@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin):
node = instance.data.get("output_node")
if not node:
- rop_path = instance[0].path()
+ rop_path = instance.data["instance_node"]
raise RuntimeError(
"No output node found. Make sure to connect an "
"input to the USD ROP: %s" % rop_path
diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
index cf8d61cda3..81274c670e 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
@@ -1,6 +1,6 @@
import pyblish.api
-from openyppe.client import get_subset_by_name, get_asset_by_name
+from openpype.client import get_subset_by_name, get_asset_by_name
from openpype.pipeline import legacy_io
import openpype.lib.usdlib as usdlib
diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py
index e3985e3c97..833add854b 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py
@@ -3,6 +3,8 @@ import os
import pyblish.api
import openpype.hosts.houdini.api.usd as usdlib
+import hou
+
class CollectUsdLayers(pyblish.api.InstancePlugin):
"""Collect the USD Layers that have configured save paths."""
@@ -19,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
self.log.debug("No output node found..")
return
- rop_node = instance[0]
+ rop_node = hou.node(instance.data.get("instance_node"))
save_layers = []
for layer in usdlib.get_configured_save_layers(rop_node):
@@ -54,8 +56,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
layer_inst.data["subset"] = "__stub__"
layer_inst.data["label"] = label
layer_inst.data["asset"] = instance.data["asset"]
- layer_inst.append(instance[0]) # include same USD ROP
- layer_inst.append((layer, save_path)) # include layer data
+ # include same USD ROP
+ layer_inst.append(rop_node)
+ # include layer data
+ layer_inst.append((layer, save_path))
# Allow this subset to be grouped into a USD Layer on creation
layer_inst.data["subsetGroup"] = "USD Layer"
diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py
index 758d4c560b..cb2d4ef424 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py
@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop
+import hou
+
class ExtractAlembic(publish.Extractor):
@@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
output = ropnode.evalParm("filename")
diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py
index a302b451cb..0d246625ba 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_ass.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py
@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop
+import hou
+
class ExtractAss(publish.Extractor):
@@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
@@ -33,8 +35,12 @@ class ExtractAss(publish.Extractor):
# error and thus still continues to the integrator. To capture that
# we make sure all files exist
files = instance.data["frames"]
- missing = [fname for fname in files
- if not os.path.exists(os.path.join(staging_dir, fname))]
+ missing = []
+ for file_name in files:
+ full_path = os.path.normpath(os.path.join(staging_dir, file_name))
+ if not os.path.exists(full_path):
+ missing.append(full_path)
+
if missing:
raise RuntimeError("Failed to complete Arnold ass extraction. "
"Missing output files: {}".format(missing))
diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py
index 23e875f107..7a1ab36b93 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_composite.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py
@@ -1,9 +1,10 @@
import os
-
import pyblish.api
from openpype.pipeline import publish
-from openpype.hosts.houdini.api.lib import render_rop
+from openpype.hosts.houdini.api.lib import render_rop, splitext
+
+import hou
class ExtractComposite(publish.Extractor):
@@ -15,7 +16,7 @@ class ExtractComposite(publish.Extractor):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the copoutput parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
@@ -28,8 +29,24 @@ class ExtractComposite(publish.Extractor):
render_rop(ropnode)
- if "files" not in instance.data:
- instance.data["files"] = []
+ output = instance.data["frames"]
+ _, ext = splitext(output[0], [])
+ ext = ext.lstrip(".")
- frames = instance.data["frames"]
- instance.data["files"].append(frames)
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ representation = {
+ "name": ext,
+ "ext": ext,
+ "files": output,
+ "stagingDir": staging_dir,
+ "frameStart": instance.data["frameStart"],
+ "frameEnd": instance.data["frameEnd"],
+ }
+
+ from pprint import pformat
+
+ self.log.info(pformat(representation))
+
+ instance.data["representations"].append(representation)
diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py
index 7dd03a92b7..8b97bf364f 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_hda.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py
@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
import os
-
from pprint import pformat
-
import pyblish.api
-
from openpype.pipeline import publish
+import hou
class ExtractHDA(publish.Extractor):
@@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor):
def process(self, instance):
self.log.info(pformat(instance.data))
- hda_node = instance[0]
+ hda_node = hou.node(instance.data.get("instance_node"))
hda_def = hda_node.type().definition()
hda_options = hda_def.options()
hda_options.setSaveInitialParmsAndContents(True)
diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py
index ca9be64a47..29ede98a52 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py
@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop
+import hou
+
class ExtractRedshiftProxy(publish.Extractor):
@@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py
index 78c32affb4..cbeb5add71 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_usd.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py
@@ -5,6 +5,7 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop
+import hou
class ExtractUSD(publish.Extractor):
@@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
output = ropnode.evalParm("lopoutput")
diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
index f686f712bb..0288b7363a 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
@@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor):
# Main ROP node, either a USD Rop or ROP network with
# multiple USD ROPs
- node = instance[0]
+ node = hou.node(instance.data.get("instance_node"))
# Collect any output dependencies that have not been processed yet
# during extraction of other instances
diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py
index 26ec423048..434d6a2160 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py
@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop
+import hou
+
class ExtractVDBCache(publish.Extractor):
@@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor):
def process(self, instance):
- ropnode = instance[0]
+ ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml
new file mode 100644
index 0000000000..0f92560bf7
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml
@@ -0,0 +1,21 @@
+
+
+
+Scene setting
+
+## Invalid input node
+
+VDB input must have the same number of VDBs, points, primitives and vertices as output.
+
+
+
+### __Detailed Info__ (optional)
+
+A VDB is an inherited type of Prim and holds the following data:
+ - Primitives: 1
+ - Points: 1
+ - Vertices: 1
+ - VDBs: 1
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py
index c990f481d3..16d9ef9aec 100644
--- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py
+++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py
@@ -2,7 +2,7 @@ import pyblish.api
from openpype.lib import version_up
from openpype.pipeline import registered_host
-
+from openpype.hosts.houdini.api import HoudiniHost
class IncrementCurrentFile(pyblish.api.ContextPlugin):
"""Increment the current file.
@@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
def process(self, context):
# Filename must not have changed since collecting
- host = registered_host()
+ host = registered_host() # type: HoudiniHost
current_file = host.current_file()
assert (
context.data["currentFile"] == current_file
), "Collected filename from current scene name."
new_filepath = version_up(current_file)
- host.save(new_filepath)
+ host.save_workfile(new_filepath)
diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py
index 6128c7af77..d6e07ccab0 100644
--- a/openpype/hosts/houdini/plugins/publish/save_scene.py
+++ b/openpype/hosts/houdini/plugins/publish/save_scene.py
@@ -14,13 +14,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin):
# Filename must not have changed since collecting
host = registered_host()
- current_file = host.current_file()
+ current_file = host.get_current_workfile()
assert context.data['currentFile'] == current_file, (
"Collected filename from current scene name."
)
if host.has_unsaved_changes():
- self.log.info("Saving current file..")
- host.save_file(current_file)
+ self.log.info("Saving current file {}...".format(current_file))
+ host.save_workfile(current_file)
else:
self.log.debug("No unsaved changes, skipping file save..")
diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py
deleted file mode 100644
index ac408bc842..0000000000
--- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
-
-
-class ValidateVDBInputNode(pyblish.api.InstancePlugin):
- """Validate that the node connected to the output node is of type VDB.
-
- Regardless of the amount of VDBs create the output will need to have an
- equal amount of VDBs, points, primitives and vertices
-
- A VDB is an inherited type of Prim, holds the following data:
- - Primitives: 1
- - Points: 1
- - Vertices: 1
- - VDBs: 1
-
- """
-
- order = ValidateContentsOrder + 0.1
- families = ["vdbcache"]
- hosts = ["houdini"]
- label = "Validate Input Node (VDB)"
-
- def process(self, instance):
- invalid = self.get_invalid(instance)
- if invalid:
- raise RuntimeError(
- "Node connected to the output node is not" "of type VDB!"
- )
-
- @classmethod
- def get_invalid(cls, instance):
-
- node = instance.data["output_node"]
-
- prims = node.geometry().prims()
- nr_of_prims = len(prims)
-
- nr_of_points = len(node.geometry().points())
- if nr_of_points != nr_of_prims:
- cls.log.error("The number of primitives and points do not match")
- return [instance]
-
- for prim in prims:
- if prim.numVertices() != 1:
- cls.log.error("Found primitive with more than 1 vertex!")
- return [instance]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
index ea800707fb..86e92a052f 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
@@ -1,8 +1,8 @@
+# -*- coding: utf-8 -*-
import pyblish.api
from collections import defaultdict
-
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
@@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder + 0.1
+ order = pyblish.api.ValidatorOrder + 0.1
families = ["pointcache"]
hosts = ["houdini"]
label = "Validate Primitive to Detail (Abc)"
@@ -24,18 +24,26 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Primitives found with inconsistent primitive "
- "to detail attributes. See log."
+ raise PublishValidationError(
+ ("Primitives found with inconsistent primitive "
+ "to detail attributes. See log."),
+ title=self.label
)
@classmethod
def get_invalid(cls, instance):
+ import hou # noqa
+ output_node = instance.data.get("output_node")
+ rop_node = hou.node(instance.data["instance_node"])
+ if output_node is None:
+ cls.log.error(
+ "SOP Output node in '%s' does not exist. "
+ "Ensure a valid SOP output path is set." % rop_node.path()
+ )
- output = instance.data["output_node"]
+ return [rop_node.path()]
- rop = instance[0]
- pattern = rop.parm("prim_to_detail_pattern").eval().strip()
+ pattern = rop_node.parm("prim_to_detail_pattern").eval().strip()
if not pattern:
cls.log.debug(
"Alembic ROP has no 'Primitive to Detail' pattern. "
@@ -43,7 +51,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
)
return
- build_from_path = rop.parm("build_from_path").eval()
+ build_from_path = rop_node.parm("build_from_path").eval()
if not build_from_path:
cls.log.debug(
"Alembic ROP has 'Build from Path' disabled. "
@@ -51,14 +59,14 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
)
return
- path_attr = rop.parm("path_attrib").eval()
+ path_attr = rop_node.parm("path_attrib").eval()
if not path_attr:
cls.log.error(
"The Alembic ROP node has no Path Attribute"
"value set, but 'Build Hierarchy from Attribute'"
"is enabled."
)
- return [rop.path()]
+ return [rop_node.path()]
# Let's assume each attribute is explicitly named for now and has no
# wildcards for Primitive to Detail. This simplifies the check.
@@ -67,7 +75,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
# Check if the primitive attribute exists
frame = instance.data.get("frameStart", 0)
- geo = output.geometryAtFrame(frame)
+ geo = output_node.geometryAtFrame(frame)
# If there are no primitives on the start frame then it might be
# something that is emitted over time. As such we can't actually
@@ -86,7 +94,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
"Geometry Primitives are missing "
"path attribute: `%s`" % path_attr
)
- return [output.path()]
+ return [output_node.path()]
# Ensure at least a single string value is present
if not attrib.strings():
@@ -94,7 +102,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
"Primitive path attribute has no "
"string values: %s" % path_attr
)
- return [output.path()]
+ return [output_node.path()]
paths = None
for attr in pattern.split(" "):
@@ -130,4 +138,4 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
"Path has multiple values: %s (path: %s)"
% (list(values), path)
)
- return [output.path()]
+ return [output_node.path()]
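The validators in this changeset all follow the same conversion: plain `RuntimeError`/`assert` reporting is replaced with `PublishValidationError` carrying a `title`, and `get_invalid()` returns offending node paths so they can be logged. A minimal sketch of that pattern with a hypothetical validator:

```python
# Minimal sketch of the RuntimeError -> PublishValidationError pattern
# applied across these Houdini validators; the plugin itself is made up.
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateSomethingExample(pyblish.api.InstancePlugin):
    """Hypothetical validator showing only the reporting pattern."""

    order = pyblish.api.ValidatorOrder
    hosts = ["houdini"]
    label = "Validate Something (example)"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                "Invalid nodes found: {}".format(invalid),
                title=self.label)

    @classmethod
    def get_invalid(cls, instance):
        # Real validators inspect hou nodes here; this stub reports nothing.
        return []
```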
diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py
index cbed3ea235..44d58cfa36 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py
@@ -1,7 +1,6 @@
+# -*- coding: utf-8 -*-
import pyblish.api
-
-from openpype.pipeline.publish import ValidateContentsOrder
-
+import hou
class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):
"""Validate Face Sets are disabled for extraction to pointcache.
@@ -18,14 +17,14 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder + 0.1
+ order = pyblish.api.ValidatorOrder + 0.1
families = ["pointcache"]
hosts = ["houdini"]
label = "Validate Alembic ROP Face Sets"
def process(self, instance):
- rop = instance[0]
+ rop = hou.node(instance.data["instance_node"])
facesets = rop.parm("facesets").eval()
# 0 = No Face Sets
diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
index 2625ae5f83..bafb206bd3 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
import pyblish.api
-
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
+import hou
class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
@@ -12,7 +13,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder + 0.1
+ order = pyblish.api.ValidatorOrder + 0.1
families = ["pointcache"]
hosts = ["houdini"]
label = "Validate Input Node (Abc)"
@@ -20,18 +21,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Primitive types found that are not supported"
- "for Alembic output."
+ raise PublishValidationError(
+ ("Primitive types found that are not supported"
+ "for Alembic output."),
+ title=self.label
)
@classmethod
def get_invalid(cls, instance):
invalid_prim_types = ["VDB", "Volume"]
- node = instance.data["output_node"]
+ output_node = instance.data.get("output_node")
- if not hasattr(node, "geometry"):
+ if output_node is None:
+ node = hou.node(instance.data["instance_node"])
+ cls.log.error(
+ "SOP Output node in '%s' does not exist. "
+ "Ensure a valid SOP output path is set." % node.path()
+ )
+
+ return [node.path()]
+
+ if not hasattr(output_node, "geometry"):
# In the case someone has explicitly set an Object
# node instead of a SOP node in Geometry context
# then for now we ignore - this allows us to also
@@ -40,7 +51,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
return
frame = instance.data.get("frameStart", 0)
- geo = node.geometryAtFrame(frame)
+ geo = output_node.geometryAtFrame(frame)
invalid = False
for prim_type in invalid_prim_types:
diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py
index 5eb8f93d03..f11f9c0c62 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py
@@ -1,6 +1,7 @@
import pyblish.api
from openpype.hosts.houdini.api import lib
+import hou
class ValidateAnimationSettings(pyblish.api.InstancePlugin):
@@ -36,7 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
- node = instance[0]
+ node = hou.node(instance.data.get("instance_node"))
# Check trange parm, 0 means Render Current Frame
frame_range = node.evalParm("trange")
diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py
index 7cf8da69d6..1bf51a986c 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py
@@ -1,6 +1,8 @@
+# -*- coding: utf-8 -*-
import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
+import hou
class ValidateBypassed(pyblish.api.InstancePlugin):
"""Validate all primitives build hierarchy from attribute when enabled.
@@ -11,7 +13,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder - 0.1
+ order = pyblish.api.ValidatorOrder - 0.1
families = ["*"]
hosts = ["houdini"]
label = "Validate ROP Bypass"
@@ -26,14 +28,15 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
rop = invalid[0]
- raise RuntimeError(
- "ROP node %s is set to bypass, publishing cannot continue.."
- % rop.path()
+ raise PublishValidationError(
+ ("ROP node {} is set to bypass, publishing cannot "
+ "continue.".format(rop.path())),
+ title=self.label
)
@classmethod
def get_invalid(cls, instance):
- rop = instance[0]
+ rop = hou.node(instance.data.get("instance_node"))
if hasattr(rop, "isBypassed") and rop.isBypassed():
return [rop]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py
index d414920f8b..41b5273e6a 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py
@@ -1,11 +1,13 @@
+# -*- coding: utf-8 -*-
+"""Validator plugin for Houdini Camera ROP settings."""
import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
class ValidateCameraROP(pyblish.api.InstancePlugin):
"""Validate Camera ROP settings."""
- order = ValidateContentsOrder
+ order = pyblish.api.ValidatorOrder
families = ["camera"]
hosts = ["houdini"]
label = "Camera ROP"
@@ -14,30 +16,45 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):
import hou
- node = instance[0]
+ node = hou.node(instance.data.get("instance_node"))
if node.parm("use_sop_path").eval():
- raise RuntimeError(
- "Alembic ROP for Camera export should not be "
- "set to 'Use Sop Path'. Please disable."
+ raise PublishValidationError(
+ ("Alembic ROP for Camera export should not be "
+ "set to 'Use Sop Path'. Please disable."),
+ title=self.label
)
# Get the root and objects parameter of the Alembic ROP node
root = node.parm("root").eval()
objects = node.parm("objects").eval()
- assert root, "Root parameter must be set on Alembic ROP"
- assert root.startswith("/"), "Root parameter must start with slash /"
- assert objects, "Objects parameter must be set on Alembic ROP"
- assert len(objects.split(" ")) == 1, "Must have only a single object."
+ errors = []
+ if not root:
+ errors.append("Root parameter must be set on Alembic ROP")
+ if not root.startswith("/"):
+ errors.append("Root parameter must start with slash /")
+ if not objects:
+ errors.append("Objects parameter must be set on Alembic ROP")
+ if len(objects.split(" ")) != 1:
+ errors.append("Must have only a single object.")
+
+ if errors:
+ for error in errors:
+ self.log.error(error)
+ raise PublishValidationError(
+ "Some checks failed, see validator log.",
+ title=self.label)
# Check if the object exists and is a camera
path = root + "/" + objects
camera = hou.node(path)
if not camera:
- raise ValueError("Camera path does not exist: %s" % path)
+ raise PublishValidationError(
+ "Camera path does not exist: %s" % path,
+ title=self.label)
if camera.type().name() != "cam":
- raise ValueError(
- "Object set in Alembic ROP is not a camera: "
- "%s (type: %s)" % (camera, camera.type().name())
- )
+ raise PublishValidationError(
+ ("Object set in Alembic ROP is not a camera: "
+ "{} (type: {})").format(camera, camera.type().name()),
+ title=self.label)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py
index 543539ffe3..1d0377c818 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py
@@ -1,4 +1,9 @@
+# -*- coding: utf-8 -*-
+import sys
import pyblish.api
+import six
+
+from openpype.pipeline import PublishValidationError
class ValidateCopOutputNode(pyblish.api.InstancePlugin):
@@ -20,9 +25,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Output node(s) `%s` are incorrect. "
- "See plug-in log for details." % invalid
+ raise PublishValidationError(
+ ("Output node(s) `{}` are incorrect. "
+ "See plug-in log for details.").format(invalid),
+ title=self.label
)
@classmethod
@@ -30,10 +36,19 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
import hou
- output_node = instance.data["output_node"]
+ try:
+ output_node = instance.data["output_node"]
+ except KeyError:
+ six.reraise(
+ PublishValidationError,
+ PublishValidationError(
+ "Can't determine COP output node.",
+ title=cls.__name__),
+ sys.exc_info()[2]
+ )
if output_node is None:
- node = instance[0]
+ node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"COP Output node in '%s' does not exist. "
"Ensure a valid COP output path is set." % node.path()
@@ -54,7 +69,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
# For the sake of completeness also assert the category type
# is Cop2 to avoid potential edge case scenarios even though
# the isinstance check above should be stricter than this category
- assert output_node.type().category().name() == "Cop2", (
- "Output node %s is not of category Cop2. This is a bug.."
- % output_node.path()
- )
+ if output_node.type().category().name() != "Cop2":
+ raise PublishValidationError(
+ ("Output node %s is not of category Cop2. "
+ "This is a bug...").format(output_node.path()),
+ title=cls.label)
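The COP output validator uses `six.reraise` so a missing `output_node` key is surfaced as a `PublishValidationError` while the original traceback is preserved. A standalone sketch of that idiom outside the plugin:

```python
# Standalone sketch of the six.reraise idiom: swap the exception type but
# keep the original traceback (Python 2/3 compatible).
import sys

import six


class ExampleError(Exception):
    pass


def lookup(data):
    try:
        return data["output_node"]
    except KeyError:
        six.reraise(
            ExampleError,
            ExampleError("Can't determine the output node."),
            sys.exc_info()[2])


try:
    lookup({})
except ExampleError as exc:
    print(exc)
```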
diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py
index b26d28a1e7..4584e78f4f 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py
@@ -1,7 +1,11 @@
+# -*- coding: utf-8 -*-
import os
import pyblish.api
from openpype.hosts.houdini.api import lib
+from openpype.pipeline import PublishValidationError
+
+import hou
class ValidateFileExtension(pyblish.api.InstancePlugin):
@@ -29,15 +33,16 @@ class ValidateFileExtension(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "ROP node has incorrect " "file extension: %s" % invalid
+ raise PublishValidationError(
+ "ROP node has incorrect file extension: {}".format(invalid),
+ title=self.label
)
@classmethod
def get_invalid(cls, instance):
# Get ROP node from instance
- node = instance[0]
+ node = hou.node(instance.data["instance_node"])
# Create lookup for current family in instance
families = []
@@ -53,7 +58,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin):
for family in families:
extension = cls.family_extensions.get(family, None)
if extension is None:
- raise RuntimeError("Unsupported family: %s" % family)
+ raise PublishValidationError(
+ "Unsupported family: {}".format(family),
+ title=cls.label)
if output_extension != extension:
return [node.path()]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py
index 76b5910576..b5f6ba71e1 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py
@@ -1,6 +1,7 @@
import pyblish.api
from openpype.hosts.houdini.api import lib
+import hou
class ValidateFrameToken(pyblish.api.InstancePlugin):
@@ -36,7 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
- node = instance[0]
+ node = hou.node(instance.data.get("instance_node"))
# Check trange parm, 0 means Render Current Frame
frame_range = node.evalParm("trange")
diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py
index f5f03aa844..f1c52f22c1 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py
@@ -1,4 +1,6 @@
+# -*- coding: utf-8 -*-
import pyblish.api
+from openpype.pipeline import PublishValidationError
class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):
@@ -24,7 +26,7 @@ class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):
license = hou.licenseCategory()
if license != hou.licenseCategoryType.Commercial:
- raise RuntimeError(
- "USD Publishing requires a full Commercial "
- "license. You are on: %s" % license
- )
+ raise PublishValidationError(
+ ("USD Publishing requires a full Commercial "
+ "license. You are on: {}").format(license),
+ title=self.label)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py
index be6a798a95..9d1f92a101 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py
@@ -1,11 +1,12 @@
+# -*- coding: utf-8 -*-
import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
"""Validate Create Intermediate Directories is enabled on ROP node."""
- order = ValidateContentsOrder
+ order = pyblish.api.ValidatorOrder
families = ["pointcache", "camera", "vdbcache"]
hosts = ["houdini"]
label = "Create Intermediate Directories Checked"
@@ -14,10 +15,10 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Found ROP node with Create Intermediate "
- "Directories turned off: %s" % invalid
- )
+ raise PublishValidationError(
+ ("Found ROP node with Create Intermediate "
+ "Directories turned off: {}".format(invalid)),
+ title=self.label)
@classmethod
def get_invalid(cls, instance):
diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py
index 76635d4ed5..f7c95aaf4e 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
import pyblish.api
import hou
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
def cook_in_range(node, start, end):
@@ -28,7 +29,7 @@ def get_errors(node):
class ValidateNoErrors(pyblish.api.InstancePlugin):
"""Validate the Instance has no current cooking errors."""
- order = ValidateContentsOrder
+ order = pyblish.api.ValidatorOrder
hosts = ["houdini"]
label = "Validate no errors"
@@ -37,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin):
validate_nodes = []
if len(instance) > 0:
- validate_nodes.append(instance[0])
+ validate_nodes.append(hou.node(instance.data.get("instance_node")))
output_node = instance.data.get("output_node")
if output_node:
validate_nodes.append(output_node)
@@ -62,4 +63,6 @@ class ValidateNoErrors(pyblish.api.InstancePlugin):
errors = get_errors(node)
if errors:
self.log.error(errors)
- raise RuntimeError("Node has errors: %s" % node.path())
+ raise PublishValidationError(
+ "Node has errors: {}".format(node.path()),
+ title=self.label)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
index 7a8cd04f15..d3a4c0cfbf 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
@@ -1,5 +1,8 @@
+# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
+import hou
class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
@@ -19,19 +22,26 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "See log for details. " "Invalid nodes: {0}".format(invalid)
+ raise PublishValidationError(
+ "See log for details. " "Invalid nodes: {0}".format(invalid),
+ title=self.label
)
@classmethod
def get_invalid(cls, instance):
- import hou
+ output_node = instance.data.get("output_node")
+ rop_node = hou.node(instance.data["instance_node"])
- output = instance.data["output_node"]
+ if output_node is None:
+ cls.log.error(
+ "SOP Output node in '%s' does not exist. "
+ "Ensure a valid SOP output path is set." % rop_node.path()
+ )
- rop = instance[0]
- build_from_path = rop.parm("build_from_path").eval()
+ return [rop_node.path()]
+
+ build_from_path = rop_node.parm("build_from_path").eval()
if not build_from_path:
cls.log.debug(
"Alembic ROP has 'Build from Path' disabled. "
@@ -39,20 +49,20 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
)
return
- path_attr = rop.parm("path_attrib").eval()
+ path_attr = rop_node.parm("path_attrib").eval()
if not path_attr:
cls.log.error(
"The Alembic ROP node has no Path Attribute"
"value set, but 'Build Hierarchy from Attribute'"
"is enabled."
)
- return [rop.path()]
+ return [rop_node.path()]
cls.log.debug("Checking for attribute: %s" % path_attr)
# Check if the primitive attribute exists
frame = instance.data.get("frameStart", 0)
- geo = output.geometryAtFrame(frame)
+ geo = output_node.geometryAtFrame(frame)
# If there are no primitives on the current frame then we can't
# check whether the path names are correct. So we'll just issue a
@@ -73,7 +83,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
"Geometry Primitives are missing "
"path attribute: `%s`" % path_attr
)
- return [output.path()]
+ return [output_node.path()]
# Ensure at least a single string value is present
if not attrib.strings():
@@ -81,7 +91,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
"Primitive path attribute has no "
"string values: %s" % path_attr
)
- return [output.path()]
+ return [output_node.path()]
paths = geo.primStringAttribValues(path_attr)
# Ensure all primitives are set to a valid path
@@ -93,4 +103,4 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
"Prims have no value for attribute `%s` "
"(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims)
)
- return [output.path()]
+ return [output_node.path()]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py
index 0ab182c584..4e8e5fc0e8 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py
@@ -1,7 +1,9 @@
+# -*-coding: utf-8 -*-
import pyblish.api
from openpype.hosts.houdini.api import lib
from openpype.pipeline.publish import RepairContextAction
+from openpype.pipeline import PublishValidationError
import hou
@@ -27,17 +29,24 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin):
# We ensure it's a shell node and that it has the pre-render script
# set correctly. Plus the shell script it will trigger should be
# completely empty (doing nothing)
- assert node.type().name() == "shell", "Must be shell ROP node"
- assert node.parm("command").eval() == "", "Must have no command"
- assert not node.parm("shellexec").eval(), "Must not execute in shell"
- assert (
- node.parm("prerender").eval() == cmd
- ), "REMOTE_PUBLISH node does not have correct prerender script."
- assert (
- node.parm("lprerender").eval() == "python"
- ), "REMOTE_PUBLISH node prerender script type not set to 'python'"
+ if node.type().name() != "shell":
+ self.raise_error("Must be shell ROP node")
+ if node.parm("command").eval() != "":
+ self.raise_error("Must have no command")
+ if node.parm("shellexec").eval():
+ self.raise_error("Must not execute in shell")
+ if node.parm("prerender").eval() != cmd:
+ self.raise_error(("REMOTE_PUBLISH node does not have "
+ "correct prerender script."))
+ if node.parm("lprerender").eval() != "python":
+ self.raise_error(("REMOTE_PUBLISH node prerender script "
+ "type not set to 'python'"))
@classmethod
def repair(cls, context):
"""(Re)create the node if it fails to pass validation."""
lib.create_remote_publish_node(force=True)
+
+ def raise_error(self, message):
+ self.log.error(message)
+ raise PublishValidationError(message, title=self.label)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py
index afc8df7528..8ec62f4e85 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py
@@ -1,7 +1,9 @@
+# -*- coding: utf-8 -*-
import pyblish.api
import hou
from openpype.pipeline.publish import RepairContextAction
+from openpype.pipeline import PublishValidationError
class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):
@@ -18,10 +20,12 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):
node = hou.node("/out/REMOTE_PUBLISH")
if not node:
- raise RuntimeError("Missing REMOTE_PUBLISH node.")
+ raise PublishValidationError(
+ "Missing REMOTE_PUBLISH node.", title=self.label)
if node.isBypassed():
- raise RuntimeError("REMOTE_PUBLISH must not be bypassed.")
+ raise PublishValidationError(
+ "REMOTE_PUBLISH must not be bypassed.", title=self.label)
@classmethod
def repair(cls, context):
@@ -29,7 +33,8 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):
node = hou.node("/out/REMOTE_PUBLISH")
if not node:
- raise RuntimeError("Missing REMOTE_PUBLISH node.")
+ raise PublishValidationError(
+ "Missing REMOTE_PUBLISH node.", title=cls.label)
cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH")
node.bypass(False)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py
index a5a07b1b1a..ed7f438729 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py
@@ -1,4 +1,6 @@
+# -*- coding: utf-8 -*-
import pyblish.api
+from openpype.pipeline import PublishValidationError
class ValidateSopOutputNode(pyblish.api.InstancePlugin):
@@ -22,9 +24,9 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Output node(s) `%s` are incorrect. "
- "See plug-in log for details." % invalid
+ raise PublishValidationError(
+ "Output node(s) are incorrect",
+ title="Invalid output node(s)"
)
@classmethod
@@ -32,10 +34,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
import hou
- output_node = instance.data["output_node"]
+ output_node = instance.data.get("output_node")
if output_node is None:
- node = instance[0]
+ node = hou.node(instance.data["instance_node"])
cls.log.error(
"SOP Output node in '%s' does not exist. "
"Ensure a valid SOP output path is set." % node.path()
@@ -56,10 +58,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
# For the sake of completeness also assert the category type
# is Sop to avoid potential edge case scenarios even though
# the isinstance check above should be stricter than this category
- assert output_node.type().category().name() == "Sop", (
- "Output node %s is not of category Sop. This is a bug.."
- % output_node.path()
- )
+ if output_node.type().category().name() != "Sop":
+ raise PublishValidationError(
+ ("Output node {} is not of category Sop. "
+ "This is a bug.").format(output_node.path()),
+ title=cls.label)
# Ensure the node is cooked and succeeds to cook so we can correctly
# check for its geometry data.
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py
index ac0181aed2..a0e2302495 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py
@@ -1,6 +1,10 @@
+# -*- coding: utf-8 -*-
import pyblish.api
import openpype.hosts.houdini.api.usd as hou_usdlib
+from openpype.pipeline import PublishValidationError
+
+import hou
class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
@@ -24,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
def process(self, instance):
- rop = instance[0]
+        rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)
@@ -44,7 +48,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
invalid.append(layer)
if invalid:
- raise RuntimeError(
+ raise PublishValidationError((
"Loaded layers have backslashes. "
- "This is invalid for HUSK USD rendering."
- )
+ "This is invalid for HUSK USD rendering."),
+ title=self.label)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py
index 2fd2f5eb9f..a55eb70cb2 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py
@@ -1,10 +1,13 @@
+# -*- coding: utf-8 -*-
import pyblish.api
import openpype.hosts.houdini.api.usd as hou_usdlib
-
+from openpype.pipeline import PublishValidationError
from pxr import UsdShade, UsdRender, UsdLux
+import hou
+
def fullname(o):
"""Get fully qualified class name"""
@@ -37,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin):
def process(self, instance):
- rop = instance[0]
+        rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)
@@ -55,7 +58,8 @@ class ValidateUsdModel(pyblish.api.InstancePlugin):
if invalid:
prim_paths = sorted([str(prim.GetPath()) for prim in invalid])
- raise RuntimeError("Found invalid primitives: %s" % prim_paths)
+ raise PublishValidationError(
+ "Found invalid primitives: {}".format(prim_paths))
class ValidateUsdShade(ValidateUsdModel):
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py
index 1f10fafdf4..af21efcafc 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py
@@ -1,4 +1,6 @@
+# -*- coding: utf-8 -*-
import pyblish.api
+from openpype.pipeline import PublishValidationError
class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
@@ -20,9 +22,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Output node(s) `%s` are incorrect. "
- "See plug-in log for details." % invalid
+ raise PublishValidationError(
+ ("Output node(s) `{}` are incorrect. "
+ "See plug-in log for details.").format(invalid),
+ title=self.label
)
@classmethod
@@ -33,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
output_node = instance.data["output_node"]
if output_node is None:
- node = instance[0]
+            node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"USD node '%s' LOP path does not exist. "
"Ensure a valid LOP path is set." % node.path()
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py
index 36336a03ae..02c44ab94e 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py
@@ -1,6 +1,8 @@
+# -*- coding: utf-8 -*-
+import os
import pyblish.api
-import os
+from openpype.pipeline import PublishValidationError
class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):
@@ -28,4 +30,5 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):
if invalid:
for message in invalid:
self.log.error(message)
- raise RuntimeError("USD Render Paths are invalid.")
+ raise PublishValidationError(
+ "USD Render Paths are invalid.", title=self.label)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py
index fb1094e6b5..01ebc0e828 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py
@@ -1,6 +1,8 @@
+# -*- coding: utf-8 -*-
import pyblish.api
import openpype.hosts.houdini.api.usd as hou_usdlib
+from openpype.pipeline import PublishValidationError
class ValidateUsdSetDress(pyblish.api.InstancePlugin):
@@ -20,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin):
def process(self, instance):
from pxr import UsdGeom
+ import hou
- rop = instance[0]
+        rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)
@@ -47,8 +50,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin):
invalid.append(node)
if invalid:
- raise RuntimeError(
+ raise PublishValidationError((
"SetDress contains local geometry. "
"This is not allowed, it must be an assembly "
- "of referenced assets."
+ "of referenced assets."),
+ title=self.label
)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
index f08c7c72c5..c4f118ac3b 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
import re
import pyblish.api
@@ -5,6 +6,7 @@ import pyblish.api
from openpype.client import get_subset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
@@ -32,7 +34,8 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
project_name, model_subset, asset_doc["_id"], fields=["_id"]
)
if not subset_doc:
- raise RuntimeError(
- "USD Model subset not found: "
- "%s (%s)" % (model_subset, asset_name)
+ raise PublishValidationError(
+ ("USD Model subset not found: "
+ "{} ({})").format(model_subset, asset_name),
+ title=self.label
)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py
index a4902b48a9..bd3366a424 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py
@@ -1,5 +1,6 @@
+# -*- coding: utf-8 -*-
import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
import hou
@@ -12,14 +13,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder
+ order = pyblish.api.ValidatorOrder
hosts = ["houdini"]
families = ["usdShade"]
label = "USD Shade Workspace"
def process(self, instance):
- rop = instance[0]
+        rop = hou.node(instance.data.get("instance_node"))
workspace = rop.parent()
definition = workspace.type().definition()
@@ -39,13 +40,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
if node_type != other_node_type:
continue
- # Get highest version
+ # Get the highest version
highest = max(highest, other_version)
if version != highest:
- raise RuntimeError(
- "Shading Workspace is not the latest version."
- " Found %s. Latest is %s." % (version, highest)
+ raise PublishValidationError(
+ ("Shading Workspace is not the latest version."
+ " Found {}. Latest is {}.").format(version, highest),
+ title=self.label
)
# There were some issues with the editable node not having the right
@@ -56,8 +58,9 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
)
rop_value = rop.parm("lopoutput").rawValue()
if rop_value != value:
- raise RuntimeError(
- "Shading Workspace has invalid 'lopoutput'"
- " parameter value. The Shading Workspace"
- " needs to be reset to its default values."
+ raise PublishValidationError(
+ ("Shading Workspace has invalid 'lopoutput'"
+ " parameter value. The Shading Workspace"
+ " needs to be reset to its default values."),
+ title=self.label
)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py
index ac408bc842..1f9ccc9c42 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py
@@ -1,5 +1,8 @@
+# -*- coding: utf-8 -*-
import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import (
+ PublishValidationError
+)
class ValidateVDBInputNode(pyblish.api.InstancePlugin):
@@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder + 0.1
+ order = pyblish.api.ValidatorOrder + 0.1
families = ["vdbcache"]
hosts = ["houdini"]
label = "Validate Input Node (VDB)"
@@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Node connected to the output node is not" "of type VDB!"
+ raise PublishValidationError(
+ "Node connected to the output node is not of type VDB",
+ title=self.label
)
@classmethod
diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
index 55ed581d4c..61c1209fc9 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
import pyblish.api
import hou
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError
class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
@@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
"""
- order = ValidateContentsOrder + 0.1
+ order = pyblish.api.ValidatorOrder + 0.1
families = ["vdbcache"]
hosts = ["houdini"]
label = "Validate Output Node (VDB)"
@@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
- "Node connected to the output node is not" " of type VDB!"
+ raise PublishValidationError(
+ "Node connected to the output node is not" " of type VDB!",
+ title=self.label
)
@classmethod
@@ -36,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
if node is None:
cls.log.error(
"SOP path is not correctly set on "
- "ROP node '%s'." % instance[0].path()
+ "ROP node '%s'." % instance.get("instance_node")
)
return [instance]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
index 560b355e21..7707cc2dba 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
@@ -1,11 +1,16 @@
# -*- coding: utf-8 -*-
import pyblish.api
import hou
+from openpype.pipeline import (
+ PublishValidationError,
+ OptionalPyblishPluginMixin
+)
from openpype.pipeline.publish import RepairAction
-class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
+class ValidateWorkfilePaths(
+ pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
"""Validate workfile paths so they are absolute."""
order = pyblish.api.ValidatorOrder
@@ -19,6 +24,9 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
prohibited_vars = ["$HIP", "$JOB"]
def process(self, instance):
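+        # Skip the check when this optional plug-in is disabled for the instance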
+ if not self.is_active(instance.data):
+ return
invalid = self.get_invalid()
self.log.info(
"node types to check: {}".format(", ".join(self.node_types)))
@@ -30,15 +38,16 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
self.log.error(
"{}: {}".format(param.path(), param.unexpandedString()))
- raise RuntimeError("Invalid paths found")
+ raise PublishValidationError(
+ "Invalid paths found", title=self.label)
@classmethod
def get_invalid(cls):
invalid = []
for param, _ in hou.fileReferences():
- if param is None:
+            # hou.fileReferences() may yield a None parm; skip those entries
+ if not param:
continue
-
# skip nodes we are not interested in
if param.node().type().name() not in cls.node_types:
continue
diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml
index abfa3f136e..c08114b71b 100644
--- a/openpype/hosts/houdini/startup/MainMenuCommon.xml
+++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml
@@ -1,10 +1,10 @@
-