Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 12:54:40 +01:00
Merge remote-tracking branch 'origin/develop' into enhancement/headless-and-validation
commit 014f10a023
459 changed files with 10324 additions and 2847 deletions
.github/workflows/prerelease.yml (vendored, 2 changed lines)
@@ -47,7 +47,7 @@ jobs:
enhancementLabel: '**🚀 Enhancements**'
bugsLabel: '**🐛 Bug fixes**'
deprecatedLabel: '**⚠️ Deprecations**'
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"### 🆕 New features","labels":["feature"]},}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"
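Worth flagging: the new addSections value ends with a trailing comma before the closing brace ("labels":["feature"]},}), which strict JSON parsers reject; whether the changelog generator's own parser tolerates it is not shown here. A quick sanity check in plain Python, with the string copied from the diff above:

import json

raw = (
    '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},'
    '"tests":{"prefix":"### ✅ Testing","labels":["tests"]},'
    '"feature":{"prefix":"### 🆕 New features","labels":["feature"]},}'
)

try:
    json.loads(raw)
except json.JSONDecodeError as exc:
    # The trailing comma makes this fail with
    # "Expecting property name enclosed in double quotes".
    print(f"Invalid JSON: {exc}")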
.gitmodules (vendored, 10 changed lines)
@@ -4,9 +4,9 @@
[submodule "repos/avalon-unreal-integration"]
path = repos/avalon-unreal-integration
url = https://github.com/pypeclub/avalon-unreal-integration.git
[submodule "openpype/modules/ftrack/python2_vendor/ftrack-python-api"]
path = openpype/modules/ftrack/python2_vendor/ftrack-python-api
[submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"]
path = openpype/modules/default_modules/ftrack/python2_vendor/arrow
url = https://github.com/arrow-py/arrow.git
[submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"]
path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api
url = https://bitbucket.org/ftrack/ftrack-python-api.git
[submodule "openpype/modules/ftrack/python2_vendor/arrow"]
path = openpype/modules/ftrack/python2_vendor/arrow
url = https://github.com/arrow-py/arrow.git
CHANGELOG.md (103 changed lines)
@@ -1,10 +1,35 @@
# Changelog

## [3.3.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)
## [3.4.0-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...HEAD)
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD)

**🐛 Bug fixes**
**Merged pull requests:**

- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972)
- Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967)
- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964)
- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963)
- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962)
- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960)
- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958)
- Global: Avalon Host name collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949)
- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948)
- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947)
- Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942)
- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933)
- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915)
- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910)
- Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888)
- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876)
- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872)
- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821)

## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1)

**Merged pull requests:**

- TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946)
- Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945)
@@ -15,81 +40,57 @@

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.0-nightly.11...3.3.0)

**🚀 Enhancements**
**Merged pull requests:**

- Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940)
- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937)
- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935)
- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932)
- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930)
- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929)
- Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927)
- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926)
- Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925)
- Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923)
- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922)
- Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920)
- Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919)
- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917)
- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916)
- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914)
- submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911)
- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906)
- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905)
- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904)
- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903)
- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902)
- Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901)
- Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900)
- Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899)
- Support nested studio plugins paths. [\#1898](https://github.com/pypeclub/OpenPype/pull/1898)
- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893)
- Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892)
- Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891)
- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890)
- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889)
- Expose stop timer through rest api. [\#1886](https://github.com/pypeclub/OpenPype/pull/1886)
- TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885)
- Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882)
- Normalize path returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880)
- Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869)
- Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868)
- Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867)
- Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865)
- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859)
- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815)

**🐛 Bug fixes**

- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935)
- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930)
- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929)
- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926)
- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922)
- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917)
- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916)
- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914)
- Fix - validators for textures workfiles trigger only for textures workfiles [\#1913](https://github.com/pypeclub/OpenPype/pull/1913)
- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906)
- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904)
- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903)
- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902)
- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893)
- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890)
- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889)
- Normalize path returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880)
- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862)
- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855)
- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798)

**Merged pull requests:**

- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937)
- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932)
- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905)
- Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863)
- Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812)
- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862)
- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859)
- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855)
- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815)

## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0)

**🚀 Enhancements**

- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805)
- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799)
- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795)

**🐛 Bug fixes**

- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803)
- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801)

**Merged pull requests:**

- Build: don't add Poetry to `PATH` [\#1808](https://github.com/pypeclub/OpenPype/pull/1808)

## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4)
@@ -537,7 +537,8 @@ class BootstrapRepos:
                    file.resolve().relative_to(openpype_root)
                )
            )
            zip_file.write(file, file.relative_to(openpype_root))
            zip_file.write(
                file, file.resolve().relative_to(openpype_root))

        checksums_str = ""
        for c in checksums:
@@ -68,6 +68,10 @@ def patched_discover(superclass):
def install():
    """Install Pype to Avalon."""
    from pyblish.lib import MessageHandler
    from openpype.modules import load_modules

    # Make sure modules are loaded
    load_modules()

    def modified_emit(obj, record):
        """Method replacing `emit` in Pyblish's MessageHandler."""
@@ -96,6 +96,31 @@ def eventserver(debug,
    )


@main.command()
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host", default=None)
@click.option("-p", "--port", help="Port", default=None)
@click.option("-e", "--executable", help="Executable")
@click.option("-u", "--upload_dir", help="Upload dir")
def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None):
    """Start a webserver for communication with Webpublish FR via command line.

    OP must be configured on the machine, e.g. OPENPYPE_MONGO set AND
    FTRACK_BOT_API_KEY provided with an API key from Ftrack.

    Expects a "pype.club" user created on Ftrack.
    """
    if debug:
        os.environ['OPENPYPE_DEBUG'] = "3"

    PypeCommands().launch_webpublisher_webservercli(
        upload_dir=upload_dir,
        executable=executable,
        host=host,
        port=port
    )


@main.command()
@click.argument("output_json_path")
@click.option("--project", help="Project name", default=None)
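For a quick smoke test of these commands without a full launcher run, click's testing runner can invoke them in-process. A minimal sketch; the import path for the main group is an assumption, and "--help" is used so nothing touches Mongo or Ftrack:

from click.testing import CliRunner

from openpype.cli import main  # assumed location of the click group above

runner = CliRunner()
# Exercises option parsing only; a real run needs OPENPYPE_MONGO etc.
result = runner.invoke(main, ["webpublisherwebserver", "--help"])
print(result.output)
assert result.exit_code == 0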
@@ -133,6 +158,25 @@ def publish(debug, paths, targets):
    PypeCommands.publish(list(paths), targets)


@main.command()
@click.argument("path")
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
              multiple=True)
def remotepublish(debug, project, path, host, targets=None, user=None):
    """Start CLI publishing.

    Publish collects json from paths provided as an argument.
    More than one path is allowed.
    """
    if debug:
        os.environ['OPENPYPE_DEBUG'] = '3'
    PypeCommands.remotepublish(project, path, host, user, targets=targets)


@main.command()
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-p", "--project", required=True,
@@ -5,11 +5,12 @@ from typing import Dict, List, Optional

import bpy

from avalon import api
import avalon.blender
from avalon import api, blender
from avalon.blender import ops
from avalon.blender.pipeline import AVALON_CONTAINERS
from openpype.api import PypeCreatorMixin

VALID_EXTENSIONS = [".blend", ".json", ".abc"]
VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]


def asset_name(
@@ -27,32 +28,24 @@ def get_unique_number(
    asset: str, subset: str
) -> str:
    """Return a unique number based on the asset name."""
    avalon_containers = [
        c for c in bpy.data.collections
        if c.name == 'AVALON_CONTAINERS'
    ]
    containers = []
    # First, add the children of avalon containers
    for c in avalon_containers:
        containers.extend(c.children)
    # then keep looping to include all the children
    for c in containers:
        containers.extend(c.children)
    container_names = [
        c.name for c in containers
    ]
    avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
    if not avalon_container:
        return "01"
    asset_groups = avalon_container.all_objects

    container_names = [c.name for c in asset_groups if c.type == 'EMPTY']
    count = 1
    name = f"{asset}_{count:0>2}_{subset}_CON"
    name = f"{asset}_{count:0>2}_{subset}"
    while name in container_names:
        count += 1
        name = f"{asset}_{count:0>2}_{subset}_CON"
        name = f"{asset}_{count:0>2}_{subset}"
    return f"{count:0>2}"


def prepare_data(data, container_name):
    name = data.name
    local_data = data.make_local()
    local_data.name = f"{name}:{container_name}"
    local_data.name = f"{container_name}:{name}"
    return local_data
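The reworked get_unique_number walks the objects under the AVALON_CONTAINERS collection and returns the first free two-digit suffix. A standalone sketch of just the numbering loop, with illustrative names (no bpy required):

def next_unique_number(asset, subset, container_names):
    # Count up until "{asset}_{NN}_{subset}" is not taken,
    # then return the zero-padded number, exactly as above.
    count = 1
    name = f"{asset}_{count:0>2}_{subset}"
    while name in container_names:
        count += 1
        name = f"{asset}_{count:0>2}_{subset}"
    return f"{count:0>2}"

taken = ["hero_01_modelMain", "hero_02_modelMain"]
assert next_unique_number("hero", "modelMain", taken) == "03"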
@@ -102,7 +95,7 @@ def get_local_collection_with_name(name):
    return None


class Creator(PypeCreatorMixin, avalon.blender.Creator):
class Creator(PypeCreatorMixin, blender.Creator):
    pass
@@ -173,6 +166,16 @@ class AssetLoader(api.Loader):
              name: Optional[str] = None,
              namespace: Optional[str] = None,
              options: Optional[Dict] = None) -> Optional[bpy.types.Collection]:
        """Run the loader on Blender main thread."""
        mti = ops.MainThreadItem(self._load, context, name, namespace, options)
        ops.execute_in_main_thread(mti)

    def _load(self,
              context: dict,
              name: Optional[str] = None,
              namespace: Optional[str] = None,
              options: Optional[Dict] = None
              ) -> Optional[bpy.types.Collection]:
        """Load asset via database

        Arguments:
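The load/_load split defers all scene edits to Blender's main thread, because bpy data is not safe to touch from worker threads. A simplified stand-in for what the avalon.blender.ops helpers used here do; the real MainThreadItem/execute_in_main_thread drain a queue from a timer on Blender's main thread:

import queue

_execution_queue = queue.Queue()


class MainThreadItem:
    """Queued callback executed later on the main thread."""

    def __init__(self, callback, *args, **kwargs):
        self.callback = callback
        self.args = args
        self.kwargs = kwargs

    def execute(self):
        self.callback(*self.args, **self.kwargs)


def execute_in_main_thread(item):
    _execution_queue.put(item)


def process_queue():
    # Called periodically on Blender's main thread (e.g. from a timer).
    while not _execution_queue.empty():
        _execution_queue.get().execute()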
@@ -218,16 +221,26 @@ class AssetLoader(api.Loader):
        #     loader=self.__class__.__name__,
        # )

        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        instance_name = asset_name(asset, subset, unique_number) + '_CON'
        # asset = context["asset"]["name"]
        # subset = context["subset"]["name"]
        # instance_name = asset_name(asset, subset, unique_number) + '_CON'

        return self._get_instance_collection(instance_name, nodes)
        # return self._get_instance_collection(instance_name, nodes)

    def exec_update(self, container: Dict, representation: Dict):
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")

    def update(self, container: Dict, representation: Dict):
        """Run the update on Blender main thread."""
        mti = ops.MainThreadItem(self.exec_update, container, representation)
        ops.execute_in_main_thread(mti)

    def exec_remove(self, container: Dict) -> bool:
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")

    def remove(self, container: Dict) -> bool:
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")
        """Run the remove on Blender main thread."""
        mti = ops.MainThreadItem(self.exec_remove, container)
        ops.execute_in_main_thread(mti)
openpype/hosts/blender/hooks/pre_windows_console.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import subprocess
from openpype.lib import PreLaunchHook


class BlenderConsoleWindows(PreLaunchHook):
    """Blender must be launched in a specific way.

    Blender is executed like a python process, so on Windows the
    `CREATE_NEW_CONSOLE` flag is required to trigger creation of a new
    console. At the same time the newly created console won't create its
    own stdout and stderr handlers, so they should not be redirected to
    DEVNULL.
    """

    # Should run as the last hook because it changes launch arguments
    order = 1000
    app_groups = ["blender"]
    platforms = ["windows"]

    def execute(self):
        # Change `creationflags` to CREATE_NEW_CONSOLE
        # - on Windows, Blender will create a new window using its console
        # Set `stdout` and `stderr` to None so the newly created console
        # does not have output redirected to DEVNULL in build
        self.launch_context.kwargs.update({
            "creationflags": subprocess.CREATE_NEW_CONSOLE,
            "stdout": None,
            "stderr": None
        })
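What the hook's kwargs amount to, as a bare subprocess sketch (Windows-only; the Blender path is illustrative):

import subprocess

# Give the child process its own console window and keep stdout/stderr
# attached to that console instead of redirecting them to DEVNULL.
proc = subprocess.Popen(
    ["C:/Program Files/Blender Foundation/Blender/blender.exe"],
    creationflags=subprocess.CREATE_NEW_CONSOLE,
    stdout=None,
    stderr=None,
)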
@@ -2,11 +2,13 @@

import bpy

from avalon import api, blender
import openpype.hosts.blender.api.plugin
from avalon import api
from avalon.blender import lib, ops
from avalon.blender.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin


class CreateAnimation(openpype.hosts.blender.api.plugin.Creator):
class CreateAnimation(plugin.Creator):
    """Animation output for character rigs"""

    name = "animationMain"
@@ -15,16 +17,36 @@ class CreateAnimation(openpype.hosts.blender.api.plugin.Creator):
    icon = "male"

    def process(self):
        """Run the creator on Blender main thread."""
        mti = ops.MainThreadItem(self._process)
        ops.execute_in_main_thread(mti)

    def _process(self):
        # Get the instance container or create it if it does not exist
        instances = bpy.data.collections.get(AVALON_INSTANCES)
        if not instances:
            instances = bpy.data.collections.new(name=AVALON_INSTANCES)
            bpy.context.scene.collection.children.link(instances)

        # Create instance object
        # name = self.name
        # if not name:
        asset = self.data["asset"]
        subset = self.data["subset"]
        name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        name = plugin.asset_name(asset, subset)
        # asset_group = bpy.data.objects.new(name=name, object_data=None)
        # asset_group.empty_display_type = 'SINGLE_ARROW'
        asset_group = bpy.data.collections.new(name=name)
        instances.children.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        blender.lib.imprint(collection, self.data)
        lib.imprint(asset_group, self.data)

        if (self.options or {}).get("useSelection"):
            for obj in blender.lib.get_selection():
                collection.objects.link(obj)
            selected = lib.get_selection()
            for obj in selected:
                asset_group.objects.link(obj)
        elif (self.options or {}).get("asset_group"):
            obj = (self.options or {}).get("asset_group")
            asset_group.objects.link(obj)

        return collection
        return asset_group
@@ -3,11 +3,12 @@
import bpy

from avalon import api
from avalon.blender import lib
import openpype.hosts.blender.api.plugin
from avalon.blender import lib, ops
from avalon.blender.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin


class CreateLayout(openpype.hosts.blender.api.plugin.Creator):
class CreateLayout(plugin.Creator):
    """Layout output for character rigs"""

    name = "layoutMain"
@@ -16,13 +17,34 @@ class CreateLayout(openpype.hosts.blender.api.plugin.Creator):
    icon = "cubes"

    def process(self):
        """Run the creator on Blender main thread."""
        mti = ops.MainThreadItem(self._process)
        ops.execute_in_main_thread(mti)

    def _process(self):
        # Get the instance container or create it if it does not exist
        instances = bpy.data.collections.get(AVALON_INSTANCES)
        if not instances:
            instances = bpy.data.collections.new(name=AVALON_INSTANCES)
            bpy.context.scene.collection.children.link(instances)

        # Create instance object
        asset = self.data["asset"]
        subset = self.data["subset"]
        name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        collection = bpy.context.collection
        collection.name = name
        name = plugin.asset_name(asset, subset)
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)
        lib.imprint(asset_group, self.data)

        return collection
        # Add selected objects to instance
        if (self.options or {}).get("useSelection"):
            bpy.context.view_layer.objects.active = asset_group
            selected = lib.get_selection()
            for obj in selected:
                obj.select_set(True)
            selected.append(asset_group)
            bpy.ops.object.parent_set(keep_transform=True)

        return asset_group
@@ -3,11 +3,12 @@
import bpy

from avalon import api
from avalon.blender import lib
import openpype.hosts.blender.api.plugin
from avalon.blender import lib, ops
from avalon.blender.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin


class CreateModel(openpype.hosts.blender.api.plugin.Creator):
class CreateModel(plugin.Creator):
    """Polygonal static geometry"""

    name = "modelMain"
@@ -16,17 +17,34 @@ class CreateModel(openpype.hosts.blender.api.plugin.Creator):
    icon = "cube"

    def process(self):
        """Run the creator on Blender main thread."""
        mti = ops.MainThreadItem(self._process)
        ops.execute_in_main_thread(mti)

    def _process(self):
        # Get the instance container or create it if it does not exist
        instances = bpy.data.collections.get(AVALON_INSTANCES)
        if not instances:
            instances = bpy.data.collections.new(name=AVALON_INSTANCES)
            bpy.context.scene.collection.children.link(instances)

        # Create instance object
        asset = self.data["asset"]
        subset = self.data["subset"]
        name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        name = plugin.asset_name(asset, subset)
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)
        lib.imprint(asset_group, self.data)

        # Add selected objects to instance
        if (self.options or {}).get("useSelection"):
            for obj in lib.get_selection():
                collection.objects.link(obj)
            bpy.context.view_layer.objects.active = asset_group
            selected = lib.get_selection()
            for obj in selected:
                obj.select_set(True)
            selected.append(asset_group)
            bpy.ops.object.parent_set(keep_transform=True)

        return collection
        return asset_group
@@ -3,11 +3,12 @@
import bpy

from avalon import api
from avalon.blender import lib
import openpype.hosts.blender.api.plugin
from avalon.blender import lib, ops
from avalon.blender.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin


class CreateRig(openpype.hosts.blender.api.plugin.Creator):
class CreateRig(plugin.Creator):
    """Artist-friendly rig with controls to direct motion"""

    name = "rigMain"
@@ -16,26 +17,34 @@ class CreateRig(openpype.hosts.blender.api.plugin.Creator):
    icon = "wheelchair"

    def process(self):
        """Run the creator on Blender main thread."""
        mti = ops.MainThreadItem(self._process)
        ops.execute_in_main_thread(mti)

    def _process(self):
        # Get the instance container or create it if it does not exist
        instances = bpy.data.collections.get(AVALON_INSTANCES)
        if not instances:
            instances = bpy.data.collections.new(name=AVALON_INSTANCES)
            bpy.context.scene.collection.children.link(instances)

        # Create instance object
        asset = self.data["asset"]
        subset = self.data["subset"]
        name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        name = plugin.asset_name(asset, subset)
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)

        # Add the rig object and all the children meshes to
        # a set and link them all at the end to avoid duplicates.
        # Blender crashes if trying to link an object that is already linked.
        # This automatically links the children meshes if they were not
        # selected, and doesn't link them twice if they, instead,
        # were manually selected by the user.
        lib.imprint(asset_group, self.data)

        # Add selected objects to instance
        if (self.options or {}).get("useSelection"):
            for obj in lib.get_selection():
                for child in obj.users_collection[0].children:
                    collection.children.link(child)
                collection.objects.link(obj)
            bpy.context.view_layer.objects.active = asset_group
            selected = lib.get_selection()
            for obj in selected:
                obj.select_set(True)
            selected.append(asset_group)
            bpy.ops.object.parent_set(keep_transform=True)

        return collection
        return asset_group
@@ -4,9 +4,14 @@ from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

from avalon import api, blender
import bpy
import openpype.hosts.blender.api.plugin as plugin

from avalon import api
from avalon.blender import lib
from avalon.blender.pipeline import AVALON_CONTAINERS
from avalon.blender.pipeline import AVALON_CONTAINER_ID
from avalon.blender.pipeline import AVALON_PROPERTY
from openpype.hosts.blender.api import plugin


class CacheModelLoader(plugin.AssetLoader):
@@ -21,24 +26,30 @@ class CacheModelLoader(plugin.AssetLoader):
    families = ["model", "pointcache"]
    representations = ["abc"]

    label = "Link Alembic"
    label = "Load Alembic"
    icon = "code-fork"
    color = "orange"

    def _remove(self, objects, container):
        for obj in list(objects):
    def _remove(self, asset_group):
        objects = list(asset_group.children)
        empties = []

        for obj in objects:
            if obj.type == 'MESH':
                for material_slot in list(obj.material_slots):
                    bpy.data.materials.remove(material_slot.material)
                bpy.data.meshes.remove(obj.data)
            elif obj.type == 'EMPTY':
                bpy.data.objects.remove(obj)
                objects.extend(obj.children)
                empties.append(obj)

        bpy.data.collections.remove(container)
        for empty in empties:
            bpy.data.objects.remove(empty)

    def _process(self, libpath, container_name, parent_collection):
    def _process(self, libpath, asset_group, group_name):
        bpy.ops.object.select_all(action='DESELECT')

        view_layer = bpy.context.view_layer
        view_layer_collection = view_layer.active_layer_collection.collection
        collection = bpy.context.view_layer.active_layer_collection.collection

        relative = bpy.context.preferences.filepaths.use_relative_paths
        bpy.ops.wm.alembic_import(
@@ -46,34 +57,61 @@ class CacheModelLoader(plugin.AssetLoader):
            relative_path=relative
        )

        parent = parent_collection
        parent = bpy.context.scene.collection

        if parent is None:
            parent = bpy.context.scene.collection
        imported = lib.get_selection()

        model_container = bpy.data.collections.new(container_name)
        parent.children.link(model_container)
        for obj in bpy.context.selected_objects:
            model_container.objects.link(obj)
            view_layer_collection.objects.unlink(obj)
        empties = [obj for obj in imported if obj.type == 'EMPTY']

        container = None

        for empty in empties:
            if not empty.parent:
                container = empty
                break

        assert container, "No asset group found"

        # Children must be linked before parents,
        # otherwise the hierarchy will break
        objects = []
        nodes = list(container.children)

        for obj in nodes:
            obj.parent = asset_group

        bpy.data.objects.remove(container)

        for obj in nodes:
            objects.append(obj)
            nodes.extend(list(obj.children))

        objects.reverse()

        for obj in objects:
            parent.objects.link(obj)
            collection.objects.unlink(obj)

        for obj in objects:
            name = obj.name
            obj.name = f"{name}:{container_name}"
            obj.name = f"{group_name}:{name}"
            if obj.type != 'EMPTY':
                name_data = obj.data.name
                obj.data.name = f"{group_name}:{name_data}"

            # Groups are imported as Empty objects in Blender
            if obj.type == 'MESH':
                data_name = obj.data.name
                obj.data.name = f"{data_name}:{container_name}"
                for material_slot in obj.material_slots:
                    name_mat = material_slot.material.name
                    material_slot.material.name = f"{group_name}:{name_mat}"

            if not obj.get(blender.pipeline.AVALON_PROPERTY):
                obj[blender.pipeline.AVALON_PROPERTY] = dict()
            if not obj.get(AVALON_PROPERTY):
                obj[AVALON_PROPERTY] = dict()

            avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
            avalon_info.update({"container_name": container_name})
            avalon_info = obj[AVALON_PROPERTY]
            avalon_info.update({"container_name": group_name})

        bpy.ops.object.select_all(action='DESELECT')

        return model_container
        return objects

    def process_asset(
        self, context: dict, name: str, namespace: Optional[str] = None,
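The link order matters here: the loop flattens the imported hierarchy breadth-first and then reverses the list, so leaf objects are linked before their parents. A toy illustration of that traversal, independent of bpy:

class Node:
    def __init__(self, name, children=()):
        self.name = name
        self.children = list(children)


root = Node("root", [Node("a", [Node("a1")]), Node("b")])

# Breadth-first flatten, as in _process above.
objects = []
nodes = list(root.children)
for node in nodes:
    objects.append(node)
    nodes.extend(node.children)

objects.reverse()
print([n.name for n in objects])  # ['a1', 'b', 'a'], leaves first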
@@ -91,47 +129,41 @@ class CacheModelLoader(plugin.AssetLoader):
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]

        lib_container = plugin.asset_name(
            asset, subset
        )
        unique_number = plugin.get_unique_number(
            asset, subset
        )
        asset_name = plugin.asset_name(asset, subset)
        unique_number = plugin.get_unique_number(asset, subset)
        group_name = plugin.asset_name(asset, subset, unique_number)
        namespace = namespace or f"{asset}_{unique_number}"
        container_name = plugin.asset_name(
            asset, subset, unique_number
        )

        container = bpy.data.collections.new(lib_container)
        container.name = container_name
        blender.pipeline.containerise_existing(
            container,
            name,
            namespace,
            context,
            self.__class__.__name__,
        )
        avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
        if not avalon_container:
            avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
            bpy.context.scene.collection.children.link(avalon_container)

        container_metadata = container.get(
            blender.pipeline.AVALON_PROPERTY)
        asset_group = bpy.data.objects.new(group_name, object_data=None)
        avalon_container.objects.link(asset_group)

        container_metadata["libpath"] = libpath
        container_metadata["lib_container"] = lib_container
        objects = self._process(libpath, asset_group, group_name)

        obj_container = self._process(
            libpath, container_name, None)
        bpy.context.scene.collection.objects.link(asset_group)

        container_metadata["obj_container"] = obj_container
        asset_group[AVALON_PROPERTY] = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": name,
            "namespace": namespace or '',
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
            "libpath": libpath,
            "asset_name": asset_name,
            "parent": str(context["representation"]["parent"]),
            "family": context["representation"]["context"]["family"],
            "objectName": group_name
        }

        # Save the list of objects in the metadata container
        container_metadata["objects"] = obj_container.all_objects
        self[:] = objects
        return objects

        nodes = list(container.objects)
        nodes.append(container)
        self[:] = nodes
        return nodes

    def update(self, container: Dict, representation: Dict):
    def exec_update(self, container: Dict, representation: Dict):
        """Update the loaded asset.

        This will remove all objects of the current collection, load the new
@@ -143,9 +175,8 @@ class CacheModelLoader(plugin.AssetLoader):
        Warning:
            No nested collections are supported at the moment!
        """
        collection = bpy.data.collections.get(
            container["objectName"]
        )
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)
        libpath = Path(api.get_representation_path(representation))
        extension = libpath.suffix.lower()
@@ -155,12 +186,9 @@ class CacheModelLoader(plugin.AssetLoader):
            pformat(representation, indent=2),
        )

        assert collection, (
        assert asset_group, (
            f"The asset is not loaded: {container['objectName']}"
        )
        assert not (collection.children), (
            "Nested collections are not supported."
        )
        assert libpath, (
            "No existing library file found for {container['objectName']}"
        )
|
@ -171,45 +199,34 @@ class CacheModelLoader(plugin.AssetLoader):
|
|||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
collection_libpath = collection_metadata["libpath"]
|
||||
metadata = asset_group.get(AVALON_PROPERTY)
|
||||
group_libpath = metadata["libpath"]
|
||||
|
||||
obj_container = plugin.get_local_collection_with_name(
|
||||
collection_metadata["obj_container"].name
|
||||
)
|
||||
objects = obj_container.all_objects
|
||||
|
||||
container_name = obj_container.name
|
||||
|
||||
normalized_collection_libpath = (
|
||||
str(Path(bpy.path.abspath(collection_libpath)).resolve())
|
||||
normalized_group_libpath = (
|
||||
str(Path(bpy.path.abspath(group_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_collection_libpath,
|
||||
"normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_group_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_collection_libpath == normalized_libpath:
|
||||
if normalized_group_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
parent = plugin.get_parent_collection(obj_container)
|
||||
mat = asset_group.matrix_basis.copy()
|
||||
self._remove(asset_group)
|
||||
|
||||
self._remove(objects, obj_container)
|
||||
self._process(str(libpath), asset_group, object_name)
|
||||
asset_group.matrix_basis = mat
|
||||
|
||||
obj_container = self._process(
|
||||
str(libpath), container_name, parent)
|
||||
metadata["libpath"] = str(libpath)
|
||||
metadata["representation"] = str(representation["_id"])
|
||||
|
||||
collection_metadata["obj_container"] = obj_container
|
||||
collection_metadata["objects"] = obj_container.all_objects
|
||||
collection_metadata["libpath"] = str(libpath)
|
||||
collection_metadata["representation"] = str(representation["_id"])
|
||||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
|
|
@@ -222,25 +239,14 @@ class CacheModelLoader(plugin.AssetLoader):
        Warning:
            No nested collections are supported at the moment!
        """
        collection = bpy.data.collections.get(
            container["objectName"]
        )
        if not collection:
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)

        if not asset_group:
            return False
        assert not (collection.children), (
            "Nested collections are not supported."
        )

        collection_metadata = collection.get(
            blender.pipeline.AVALON_PROPERTY)
        self._remove(asset_group)

        obj_container = plugin.get_local_collection_with_name(
            collection_metadata["obj_container"].name
        )
        objects = obj_container.all_objects

        self._remove(objects, obj_container)

        bpy.data.collections.remove(collection)
        bpy.data.objects.remove(asset_group)

        return True
@@ -1,20 +1,19 @@
"""Load an animation in Blender."""

import logging
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

from avalon import api, blender
import bpy
import openpype.hosts.blender.api.plugin

from avalon.blender.pipeline import AVALON_PROPERTY
from openpype.hosts.blender.api import plugin


logger = logging.getLogger("openpype").getChild(
    "blender").getChild("load_animation")


class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader):
class BlendAnimationLoader(plugin.AssetLoader):
    """Load animations from a .blend file.

    Warning:
@@ -29,67 +28,6 @@ class BlendAnimationLoader(plugin.AssetLoader):
    icon = "code-fork"
    color = "orange"

    def _remove(self, objects, lib_container):
        for obj in list(objects):
            if obj.type == 'ARMATURE':
                bpy.data.armatures.remove(obj.data)
            elif obj.type == 'MESH':
                bpy.data.meshes.remove(obj.data)

        bpy.data.collections.remove(bpy.data.collections[lib_container])

    def _process(self, libpath, lib_container, container_name):

        relative = bpy.context.preferences.filepaths.use_relative_paths
        with bpy.data.libraries.load(
            libpath, link=True, relative=relative
        ) as (_, data_to):
            data_to.collections = [lib_container]

        scene = bpy.context.scene

        scene.collection.children.link(bpy.data.collections[lib_container])

        anim_container = scene.collection.children[lib_container].make_local()

        meshes = [obj for obj in anim_container.objects if obj.type == 'MESH']
        armatures = [
            obj for obj in anim_container.objects if obj.type == 'ARMATURE']

        # Should check if there is only an armature?

        objects_list = []

        # Link meshes first, then armatures.
        # The armature is unparented for all the non-local meshes,
        # when it is made local.
        for obj in meshes + armatures:

            obj = obj.make_local()

            obj.data.make_local()

            anim_data = obj.animation_data

            if anim_data is not None and anim_data.action is not None:

                anim_data.action.make_local()

            if not obj.get(blender.pipeline.AVALON_PROPERTY):

                obj[blender.pipeline.AVALON_PROPERTY] = dict()

            avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
            avalon_info.update({"container_name": container_name})

            objects_list.append(obj)

        anim_container.pop(blender.pipeline.AVALON_PROPERTY)

        bpy.ops.object.select_all(action='DESELECT')

        return objects_list

    def process_asset(
        self, context: dict, name: str, namespace: Optional[str] = None,
        options: Optional[Dict] = None
@@ -101,148 +39,32 @@ class BlendAnimationLoader(plugin.AssetLoader):
            context: Full parenthood of representation to load
            options: Additional settings dictionary
        """

        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        lib_container = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        container_name = openpype.hosts.blender.api.plugin.asset_name(
            asset, subset, namespace
        )

        container = bpy.data.collections.new(lib_container)
        container.name = container_name
        blender.pipeline.containerise_existing(
            container,
            name,
            namespace,
            context,
            self.__class__.__name__,
        )
        with bpy.data.libraries.load(
            libpath, link=True, relative=False
        ) as (data_from, data_to):
            data_to.objects = data_from.objects
            data_to.actions = data_from.actions

        container_metadata = container.get(
            blender.pipeline.AVALON_PROPERTY)
        container = data_to.objects[0]

        container_metadata["libpath"] = libpath
        container_metadata["lib_container"] = lib_container
        assert container, "No asset group found"

        objects_list = self._process(
            libpath, lib_container, container_name)
        target_namespace = container.get(AVALON_PROPERTY).get('namespace')

        # Save the list of objects in the metadata container
        container_metadata["objects"] = objects_list
        action = data_to.actions[0].make_local().copy()

        nodes = list(container.objects)
        nodes.append(container)
        self[:] = nodes
        return nodes
        for obj in bpy.data.objects:
            if obj.get(AVALON_PROPERTY) and obj.get(AVALON_PROPERTY).get(
                    'namespace') == target_namespace:
                if obj.children[0]:
                    if not obj.children[0].animation_data:
                        obj.children[0].animation_data_create()
                    obj.children[0].animation_data.action = action
                break

    def update(self, container: Dict, representation: Dict):
        """Update the loaded asset.
        bpy.data.objects.remove(container)

        This will remove all objects of the current collection, load the new
        ones and add them to the collection.
        If the objects of the collection are used in another collection they
        will not be removed, only unlinked. Normally this should not be the
        case though.

        Warning:
            No nested collections are supported at the moment!
        """

        collection = bpy.data.collections.get(
            container["objectName"]
        )

        libpath = Path(api.get_representation_path(representation))
        extension = libpath.suffix.lower()

        logger.info(
            "Container: %s\nRepresentation: %s",
            pformat(container, indent=2),
            pformat(representation, indent=2),
        )

        assert collection, (
            f"The asset is not loaded: {container['objectName']}"
        )
        assert not (collection.children), (
            "Nested collections are not supported."
        )
        assert libpath, (
            "No existing library file found for {container['objectName']}"
        )
        assert libpath.is_file(), (
            f"The file doesn't exist: {libpath}"
        )
        assert extension in openpype.hosts.blender.api.plugin.VALID_EXTENSIONS, (
            f"Unsupported file: {libpath}"
        )

        collection_metadata = collection.get(
            blender.pipeline.AVALON_PROPERTY)

        collection_libpath = collection_metadata["libpath"]
        normalized_collection_libpath = (
            str(Path(bpy.path.abspath(collection_libpath)).resolve())
        )
        normalized_libpath = (
            str(Path(bpy.path.abspath(str(libpath))).resolve())
        )
        logger.debug(
            "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
            normalized_collection_libpath,
            normalized_libpath,
        )
        if normalized_collection_libpath == normalized_libpath:
            logger.info("Library already loaded, not updating...")
            return

        objects = collection_metadata["objects"]
        lib_container = collection_metadata["lib_container"]

        self._remove(objects, lib_container)

        objects_list = self._process(
            str(libpath), lib_container, collection.name)

        # Save the list of objects in the metadata container
        collection_metadata["objects"] = objects_list
        collection_metadata["libpath"] = str(libpath)
        collection_metadata["representation"] = str(representation["_id"])

        bpy.ops.object.select_all(action='DESELECT')

    def remove(self, container: Dict) -> bool:
        """Remove an existing container from a Blender scene.

        Arguments:
            container (openpype:container-1.0): Container to remove,
                from `host.ls()`.

        Returns:
            bool: Whether the container was deleted.

        Warning:
            No nested collections are supported at the moment!
        """

        collection = bpy.data.collections.get(
            container["objectName"]
        )
        if not collection:
            return False
        assert not (collection.children), (
            "Nested collections are not supported."
        )

        collection_metadata = collection.get(
            blender.pipeline.AVALON_PROPERTY)
        objects = collection_metadata["objects"]
        lib_container = collection_metadata["lib_container"]

        self._remove(objects, lib_container)

        bpy.data.collections.remove(collection)

        return True
        library = bpy.data.libraries.get(bpy.path.basename(libpath))
        bpy.data.libraries.remove(library)
openpype/hosts/blender/plugins/load/load_fbx.py (new file, 273 lines)
@@ -0,0 +1,273 @@
"""Load an asset in Blender from an FBX file."""

from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

import bpy

from avalon import api
from avalon.blender import lib
from avalon.blender.pipeline import AVALON_CONTAINERS
from avalon.blender.pipeline import AVALON_CONTAINER_ID
from avalon.blender.pipeline import AVALON_PROPERTY
from openpype.hosts.blender.api import plugin


class FbxModelLoader(plugin.AssetLoader):
    """Load FBX models.

    Stores the imported asset in an empty named after the asset.
    """

    families = ["model", "rig"]
    representations = ["fbx"]

    label = "Load FBX"
    icon = "code-fork"
    color = "orange"

    def _remove(self, asset_group):
        objects = list(asset_group.children)

        for obj in objects:
            if obj.type == 'MESH':
                for material_slot in list(obj.material_slots):
                    if material_slot.material:
                        bpy.data.materials.remove(material_slot.material)
                bpy.data.meshes.remove(obj.data)
            elif obj.type == 'ARMATURE':
                objects.extend(obj.children)
                bpy.data.armatures.remove(obj.data)
            elif obj.type == 'CURVE':
                bpy.data.curves.remove(obj.data)
            elif obj.type == 'EMPTY':
                objects.extend(obj.children)
                bpy.data.objects.remove(obj)

    def _process(self, libpath, asset_group, group_name, action):
        bpy.ops.object.select_all(action='DESELECT')

        collection = bpy.context.view_layer.active_layer_collection.collection

        bpy.ops.import_scene.fbx(filepath=libpath)

        parent = bpy.context.scene.collection

        imported = lib.get_selection()

        empties = [obj for obj in imported if obj.type == 'EMPTY']

        container = None

        for empty in empties:
            if not empty.parent:
                container = empty
                break

        assert container, "No asset group found"

        # Children must be linked before parents,
        # otherwise the hierarchy will break
        objects = []
        nodes = list(container.children)

        for obj in nodes:
            obj.parent = asset_group

        bpy.data.objects.remove(container)

        for obj in nodes:
            objects.append(obj)
            nodes.extend(list(obj.children))

        objects.reverse()

        for obj in objects:
            parent.objects.link(obj)
            collection.objects.unlink(obj)

        for obj in objects:
            name = obj.name
            obj.name = f"{group_name}:{name}"
            if obj.type != 'EMPTY':
                name_data = obj.data.name
                obj.data.name = f"{group_name}:{name_data}"

            if obj.type == 'MESH':
                for material_slot in obj.material_slots:
                    name_mat = material_slot.material.name
                    material_slot.material.name = f"{group_name}:{name_mat}"
            elif obj.type == 'ARMATURE':
                anim_data = obj.animation_data
                if action is not None:
                    anim_data.action = action
                elif anim_data.action is not None:
                    name_action = anim_data.action.name
                    anim_data.action.name = f"{group_name}:{name_action}"

            if not obj.get(AVALON_PROPERTY):
                obj[AVALON_PROPERTY] = dict()

            avalon_info = obj[AVALON_PROPERTY]
            avalon_info.update({"container_name": group_name})

        bpy.ops.object.select_all(action='DESELECT')

        return objects

    def process_asset(
        self, context: dict, name: str, namespace: Optional[str] = None,
        options: Optional[Dict] = None
    ) -> Optional[List]:
        """
        Arguments:
            name: Use pre-defined name
            namespace: Use pre-defined namespace
            context: Full parenthood of representation to load
            options: Additional settings dictionary
        """
        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]

        asset_name = plugin.asset_name(asset, subset)
        unique_number = plugin.get_unique_number(asset, subset)
        group_name = plugin.asset_name(asset, subset, unique_number)
        namespace = namespace or f"{asset}_{unique_number}"

        avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
        if not avalon_container:
            avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
            bpy.context.scene.collection.children.link(avalon_container)

        asset_group = bpy.data.objects.new(group_name, object_data=None)
        avalon_container.objects.link(asset_group)

        objects = self._process(libpath, asset_group, group_name, None)

        objects = []
        nodes = list(asset_group.children)

        for obj in nodes:
            objects.append(obj)
            nodes.extend(list(obj.children))

        bpy.context.scene.collection.objects.link(asset_group)

        asset_group[AVALON_PROPERTY] = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": name,
            "namespace": namespace or '',
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
            "libpath": libpath,
            "asset_name": asset_name,
            "parent": str(context["representation"]["parent"]),
            "family": context["representation"]["context"]["family"],
            "objectName": group_name
        }

        self[:] = objects
        return objects
def exec_update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
self.log.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert asset_group, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
metadata = asset_group.get(AVALON_PROPERTY)
|
||||
group_libpath = metadata["libpath"]
|
||||
|
||||
normalized_group_libpath = (
|
||||
str(Path(bpy.path.abspath(group_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_group_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_group_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
# Get the armature of the rig
|
||||
objects = asset_group.children
|
||||
armatures = [obj for obj in objects if obj.type == 'ARMATURE']
|
||||
action = None
|
||||
|
||||
if armatures:
|
||||
armature = armatures[0]
|
||||
|
||||
if armature.animation_data and armature.animation_data.action:
|
||||
action = armature.animation_data.action
|
||||
|
||||
mat = asset_group.matrix_basis.copy()
|
||||
self._remove(asset_group)
|
||||
|
||||
self._process(str(libpath), asset_group, object_name, action)
|
||||
|
||||
asset_group.matrix_basis = mat
|
||||
|
||||
metadata["libpath"] = str(libpath)
|
||||
metadata["representation"] = str(representation["_id"])
|
||||
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (openpype:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
|
||||
if not asset_group:
|
||||
return False
|
||||
|
||||
self._remove(asset_group)
|
||||
|
||||
bpy.data.objects.remove(asset_group)
|
||||
|
||||
return True
|
||||
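For context, loaders like the one above are not instantiated directly; they are discovered and invoked through the avalon API, as the layout loaders later in this diff do. A minimal sketch of triggering the FBX loader from a Blender session, assuming a configured avalon session (`representation_id` is a placeholder for a real database id):

from avalon import api

all_loaders = api.discover(api.Loader)
loaders = api.loaders_from_representation(all_loaders, representation_id)

fbx_loader = next(
    (ldr for ldr in loaders if ldr.__name__ == "FbxModelLoader"), None)

if fbx_loader:
    # api.load resolves the representation path and runs the loader,
    # which ends up calling process_asset() above.
    api.load(fbx_loader, representation_id)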
@ -1,664 +0,0 @@
"""Load a layout in Blender."""
|
||||
|
||||
import json
|
||||
from logging import log, warning
|
||||
import math
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from avalon import api, blender, pipeline
|
||||
import bpy
|
||||
import openpype.hosts.blender.api.plugin as plugin
|
||||
from openpype.lib import get_creator_by_name
|
||||
|
||||
|
||||
class BlendLayoutLoader(plugin.AssetLoader):
|
||||
"""Load layout from a .blend file."""
|
||||
|
||||
families = ["layout"]
|
||||
representations = ["blend"]
|
||||
|
||||
label = "Link Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def _remove(self, objects, obj_container):
|
||||
for obj in list(objects):
|
||||
if obj.type == 'ARMATURE':
|
||||
bpy.data.armatures.remove(obj.data)
|
||||
elif obj.type == 'MESH':
|
||||
bpy.data.meshes.remove(obj.data)
|
||||
elif obj.type == 'CAMERA':
|
||||
bpy.data.cameras.remove(obj.data)
|
||||
elif obj.type == 'CURVE':
|
||||
bpy.data.curves.remove(obj.data)
|
||||
|
||||
for element_container in obj_container.children:
|
||||
for child in element_container.children:
|
||||
bpy.data.collections.remove(child)
|
||||
bpy.data.collections.remove(element_container)
|
||||
|
||||
bpy.data.collections.remove(obj_container)
|
||||
|
||||
def _process(self, libpath, lib_container, container_name, actions):
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
with bpy.data.libraries.load(
|
||||
libpath, link=True, relative=relative
|
||||
) as (_, data_to):
|
||||
data_to.collections = [lib_container]
|
||||
|
||||
scene = bpy.context.scene
|
||||
|
||||
scene.collection.children.link(bpy.data.collections[lib_container])
|
||||
|
||||
layout_container = scene.collection.children[lib_container].make_local()
|
||||
layout_container.name = container_name
|
||||
|
||||
objects_local_types = ['MESH', 'CAMERA', 'CURVE']
|
||||
|
||||
objects = []
|
||||
armatures = []
|
||||
|
||||
containers = list(layout_container.children)
|
||||
|
||||
for container in layout_container.children:
|
||||
if container.name == blender.pipeline.AVALON_CONTAINERS:
|
||||
containers.remove(container)
|
||||
|
||||
for container in containers:
|
||||
container.make_local()
|
||||
objects.extend([
|
||||
obj for obj in container.objects
|
||||
if obj.type in objects_local_types
|
||||
])
|
||||
armatures.extend([
|
||||
obj for obj in container.objects
|
||||
if obj.type == 'ARMATURE'
|
||||
])
|
||||
containers.extend(list(container.children))
|
||||
|
||||
# Link meshes first, then armatures.
|
||||
# The armature is unparented for all the non-local meshes,
|
||||
# when it is made local.
|
||||
for obj in objects + armatures:
|
||||
local_obj = obj.make_local()
|
||||
if obj.data:
|
||||
obj.data.make_local()
|
||||
|
||||
if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
|
||||
local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
|
||||
|
||||
avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
|
||||
avalon_info.update({"container_name": container_name})
|
||||
|
||||
action = actions.get(local_obj.name, None)
|
||||
|
||||
if local_obj.type == 'ARMATURE' and action is not None:
|
||||
local_obj.animation_data.action = action
|
||||
|
||||
layout_container.pop(blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
return layout_container
|
||||
|
||||
def process_asset(
|
||||
self, context: dict, name: str, namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None
|
||||
) -> Optional[List]:
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
lib_container = plugin.asset_name(
|
||||
asset, subset
|
||||
)
|
||||
unique_number = plugin.get_unique_number(
|
||||
asset, subset
|
||||
)
|
||||
namespace = namespace or f"{asset}_{unique_number}"
|
||||
container_name = plugin.asset_name(
|
||||
asset, subset, unique_number
|
||||
)
|
||||
|
||||
container = bpy.data.collections.new(lib_container)
|
||||
container.name = container_name
|
||||
blender.pipeline.containerise_existing(
|
||||
container,
|
||||
name,
|
||||
namespace,
|
||||
context,
|
||||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
container_metadata = container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
container_metadata["libpath"] = libpath
|
||||
container_metadata["lib_container"] = lib_container
|
||||
|
||||
obj_container = self._process(
|
||||
libpath, lib_container, container_name, {})
|
||||
|
||||
container_metadata["obj_container"] = obj_container
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
container_metadata["objects"] = obj_container.all_objects
|
||||
|
||||
# nodes = list(container.objects)
|
||||
# nodes.append(container)
|
||||
nodes = [container]
|
||||
self[:] = nodes
|
||||
return nodes
|
||||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
self.log.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert collection, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
collection_libpath = collection_metadata["libpath"]
|
||||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
obj_container = collection_metadata["obj_container"]
|
||||
|
||||
normalized_collection_libpath = (
|
||||
str(Path(bpy.path.abspath(collection_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_collection_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_collection_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
actions = {}
|
||||
|
||||
for obj in objects:
|
||||
if obj.type == 'ARMATURE':
|
||||
if obj.animation_data and obj.animation_data.action:
|
||||
actions[obj.name] = obj.animation_data.action
|
||||
|
||||
self._remove(objects, obj_container)
|
||||
|
||||
obj_container = self._process(
|
||||
str(libpath), lib_container, collection.name, actions)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
collection_metadata["obj_container"] = obj_container
|
||||
collection_metadata["objects"] = obj_container.all_objects
|
||||
collection_metadata["libpath"] = str(libpath)
|
||||
collection_metadata["representation"] = str(representation["_id"])
|
||||
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (openpype:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
if not collection:
|
||||
return False
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
objects = collection_metadata["objects"]
|
||||
obj_container = collection_metadata["obj_container"]
|
||||
|
||||
self._remove(objects, obj_container)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class UnrealLayoutLoader(plugin.AssetLoader):
|
||||
"""Load layout published from Unreal."""
|
||||
|
||||
families = ["layout"]
|
||||
representations = ["json"]
|
||||
|
||||
label = "Link Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
animation_creator_name = "CreateAnimation"
|
||||
|
||||
def _remove_objects(self, objects):
|
||||
for obj in list(objects):
|
||||
if obj.type == 'ARMATURE':
|
||||
bpy.data.armatures.remove(obj.data)
|
||||
elif obj.type == 'MESH':
|
||||
bpy.data.meshes.remove(obj.data)
|
||||
elif obj.type == 'CAMERA':
|
||||
bpy.data.cameras.remove(obj.data)
|
||||
elif obj.type == 'CURVE':
|
||||
bpy.data.curves.remove(obj.data)
|
||||
else:
|
||||
self.log.error(
|
||||
f"Object {obj.name} of type {obj.type} not recognized.")
|
||||
|
||||
def _remove_collections(self, collection):
|
||||
if collection.children:
|
||||
for child in collection.children:
|
||||
self._remove_collections(child)
|
||||
bpy.data.collections.remove(child)
|
||||
|
||||
def _remove(self, layout_container):
|
||||
layout_container_metadata = layout_container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
if layout_container.children:
|
||||
for child in layout_container.children:
|
||||
child_container = child.get(blender.pipeline.AVALON_PROPERTY)
|
||||
child_container['objectName'] = child.name
|
||||
api.remove(child_container)
|
||||
|
||||
for c in bpy.data.collections:
|
||||
metadata = c.get('avalon')
|
||||
if metadata:
|
||||
print("metadata.get('id')")
|
||||
print(metadata.get('id'))
|
||||
if metadata and metadata.get('id') == 'pyblish.avalon.instance':
|
||||
print("metadata.get('dependencies')")
|
||||
print(metadata.get('dependencies'))
|
||||
print("layout_container_metadata.get('representation')")
|
||||
print(layout_container_metadata.get('representation'))
|
||||
if metadata.get('dependencies') == layout_container_metadata.get('representation'):
|
||||
|
||||
for child in c.children:
|
||||
bpy.data.collections.remove(child)
|
||||
bpy.data.collections.remove(c)
|
||||
break
|
||||
|
||||
def _get_loader(self, loaders, family):
|
||||
name = ""
|
||||
if family == 'rig':
|
||||
name = "BlendRigLoader"
|
||||
elif family == 'model':
|
||||
name = "BlendModelLoader"
|
||||
|
||||
if name == "":
|
||||
return None
|
||||
|
||||
for loader in loaders:
|
||||
if loader.__name__ == name:
|
||||
return loader
|
||||
|
||||
return None
|
||||
|
||||
def set_transform(self, obj, transform):
|
||||
location = transform.get('translation')
|
||||
rotation = transform.get('rotation')
|
||||
scale = transform.get('scale')
|
||||
|
||||
# Y position is inverted in sign because Unreal and Blender have the
|
||||
# Y axis mirrored
|
||||
obj.location = (
|
||||
location.get('x'),
|
||||
location.get('y'),
|
||||
location.get('z')
|
||||
)
|
||||
obj.rotation_euler = (
|
||||
rotation.get('x'),
|
||||
rotation.get('y'),
|
||||
rotation.get('z')
|
||||
)
|
||||
obj.scale = (
|
||||
scale.get('x'),
|
||||
scale.get('y'),
|
||||
scale.get('z')
|
||||
)
|
||||
|
||||
def _process(
|
||||
self, libpath, layout_container, container_name, representation,
|
||||
actions, parent_collection
|
||||
):
|
||||
with open(libpath, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
scene = bpy.context.scene
|
||||
layout_collection = bpy.data.collections.new(container_name)
|
||||
scene.collection.children.link(layout_collection)
|
||||
|
||||
parent = parent_collection
|
||||
|
||||
if parent is None:
|
||||
parent = scene.collection
|
||||
|
||||
all_loaders = api.discover(api.Loader)
|
||||
|
||||
avalon_container = bpy.data.collections.get(
|
||||
blender.pipeline.AVALON_CONTAINERS)
|
||||
|
||||
for element in data:
|
||||
reference = element.get('reference')
|
||||
family = element.get('family')
|
||||
|
||||
loaders = api.loaders_from_representation(all_loaders, reference)
|
||||
loader = self._get_loader(loaders, family)
|
||||
|
||||
if not loader:
|
||||
continue
|
||||
|
||||
instance_name = element.get('instance_name')
|
||||
|
||||
element_container = api.load(
|
||||
loader,
|
||||
reference,
|
||||
namespace=instance_name
|
||||
)
|
||||
|
||||
if not element_container:
|
||||
continue
|
||||
|
||||
avalon_container.children.unlink(element_container)
|
||||
layout_container.children.link(element_container)
|
||||
|
||||
element_metadata = element_container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
# Unlink the object's collection from the scene collection and
|
||||
# link it in the layout collection
|
||||
element_collection = element_metadata.get('obj_container')
|
||||
scene.collection.children.unlink(element_collection)
|
||||
layout_collection.children.link(element_collection)
|
||||
|
||||
objects = element_metadata.get('objects')
|
||||
element_metadata['instance_name'] = instance_name
|
||||
|
||||
objects_to_transform = []
|
||||
|
||||
creator_plugin = get_creator_by_name(self.animation_creator_name)
|
||||
if not creator_plugin:
|
||||
raise ValueError("Creator plugin \"{}\" was not found.".format(
|
||||
self.animation_creator_name
|
||||
))
|
||||
|
||||
if family == 'rig':
|
||||
for o in objects:
|
||||
if o.type == 'ARMATURE':
|
||||
objects_to_transform.append(o)
|
||||
# Create an animation subset for each rig
|
||||
o.select_set(True)
|
||||
asset = api.Session["AVALON_ASSET"]
|
||||
c = api.create(
|
||||
creator_plugin,
|
||||
name="animation_" + element_collection.name,
|
||||
asset=asset,
|
||||
options={"useSelection": True},
|
||||
data={"dependencies": representation})
|
||||
scene.collection.children.unlink(c)
|
||||
parent.children.link(c)
|
||||
o.select_set(False)
|
||||
break
|
||||
elif family == 'model':
|
||||
objects_to_transform = objects
|
||||
|
||||
for o in objects_to_transform:
|
||||
self.set_transform(o, element.get('transform'))
|
||||
|
||||
if actions:
|
||||
if o.type == 'ARMATURE':
|
||||
action = actions.get(instance_name, None)
|
||||
|
||||
if action:
|
||||
if o.animation_data is None:
|
||||
o.animation_data_create()
|
||||
o.animation_data.action = action
|
||||
|
||||
return layout_collection
|
||||
|
||||
def process_asset(self,
|
||||
context: dict,
|
||||
name: str,
|
||||
namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None):
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
lib_container = plugin.asset_name(
|
||||
asset, subset
|
||||
)
|
||||
unique_number = plugin.get_unique_number(
|
||||
asset, subset
|
||||
)
|
||||
namespace = namespace or f"{asset}_{unique_number}"
|
||||
container_name = plugin.asset_name(
|
||||
asset, subset, unique_number
|
||||
)
|
||||
|
||||
layout_container = bpy.data.collections.new(container_name)
|
||||
blender.pipeline.containerise_existing(
|
||||
layout_container,
|
||||
name,
|
||||
namespace,
|
||||
context,
|
||||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
container_metadata = layout_container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
container_metadata["libpath"] = libpath
|
||||
container_metadata["lib_container"] = lib_container
|
||||
|
||||
layout_collection = self._process(
|
||||
libpath, layout_container, container_name,
|
||||
str(context["representation"]["_id"]), None, None)
|
||||
|
||||
container_metadata["obj_container"] = layout_collection
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
container_metadata["objects"] = layout_collection.all_objects
|
||||
|
||||
nodes = [layout_container]
|
||||
self[:] = nodes
|
||||
return nodes
|
||||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
"""
|
||||
layout_container = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
if not layout_container:
|
||||
return False
|
||||
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
self.log.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert layout_container, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
layout_container_metadata = layout_container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
collection_libpath = layout_container_metadata["libpath"]
|
||||
lib_container = layout_container_metadata["lib_container"]
|
||||
obj_container = plugin.get_local_collection_with_name(
|
||||
layout_container_metadata["obj_container"].name
|
||||
)
|
||||
objects = obj_container.all_objects
|
||||
|
||||
container_name = obj_container.name
|
||||
|
||||
normalized_collection_libpath = (
|
||||
str(Path(bpy.path.abspath(collection_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_collection_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_collection_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
actions = {}
|
||||
|
||||
for obj in objects:
|
||||
if obj.type == 'ARMATURE':
|
||||
if obj.animation_data and obj.animation_data.action:
|
||||
obj_cont_name = obj.get(
|
||||
blender.pipeline.AVALON_PROPERTY).get('container_name')
|
||||
obj_cont = plugin.get_local_collection_with_name(
|
||||
obj_cont_name)
|
||||
element_metadata = obj_cont.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
instance_name = element_metadata.get('instance_name')
|
||||
actions[instance_name] = obj.animation_data.action
|
||||
|
||||
self._remove(layout_container)
|
||||
|
||||
bpy.data.collections.remove(obj_container)
|
||||
|
||||
creator_plugin = get_creator_by_name(self.setdress_creator_name)
|
||||
if not creator_plugin:
|
||||
raise ValueError("Creator plugin \"{}\" was not found.".format(
|
||||
self.setdress_creator_name
|
||||
))
|
||||
|
||||
parent = api.create(
|
||||
creator_plugin,
|
||||
name="animation",
|
||||
asset=api.Session["AVALON_ASSET"],
|
||||
options={"useSelection": True},
|
||||
data={"dependencies": str(representation["_id"])})
|
||||
|
||||
layout_collection = self._process(
|
||||
libpath, layout_container, container_name,
|
||||
str(representation["_id"]), actions, parent)
|
||||
|
||||
layout_container_metadata["obj_container"] = layout_collection
|
||||
layout_container_metadata["objects"] = layout_collection.all_objects
|
||||
layout_container_metadata["libpath"] = str(libpath)
|
||||
layout_container_metadata["representation"] = str(
|
||||
representation["_id"])
|
||||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (openpype:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
"""
|
||||
layout_container = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
if not layout_container:
|
||||
return False
|
||||
|
||||
layout_container_metadata = layout_container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
obj_container = plugin.get_local_collection_with_name(
|
||||
layout_container_metadata["obj_container"].name
|
||||
)
|
||||
|
||||
self._remove(layout_container)
|
||||
|
||||
bpy.data.collections.remove(obj_container)
|
||||
bpy.data.collections.remove(layout_container)
|
||||
|
||||
return True
|
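A side note on the `set_transform` helper in the deleted `UnrealLayoutLoader` above: its comment states that the Y position is sign-inverted because Unreal and Blender have mirrored Y axes, although the tuple it builds passes the values straight through. A minimal sketch of the conversion the comment describes, assuming Unreal's left-handed, centimetre-based coordinates (the Y negation and the 0.01 unit scale are assumptions of this sketch, not part of the code above):

def unreal_to_blender_location(location):
    # Hypothetical helper: mirror Y for the handedness difference and
    # scale centimetres to metres (assumed Unreal conventions).
    return (
        location.get('x') * 0.01,
        -location.get('y') * 0.01,
        location.get('z') * 0.01,
    )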
337
openpype/hosts/blender/plugins/load/load_layout_blend.py
Normal file
@ -0,0 +1,337 @@
"""Load a layout in Blender."""
|
||||
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
|
||||
|
||||
class BlendLayoutLoader(plugin.AssetLoader):
|
||||
"""Load layout from a .blend file."""
|
||||
|
||||
families = ["layout"]
|
||||
representations = ["blend"]
|
||||
|
||||
label = "Link Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def _remove(self, asset_group):
|
||||
objects = list(asset_group.children)
|
||||
|
||||
for obj in objects:
|
||||
if obj.type == 'MESH':
|
||||
for material_slot in list(obj.material_slots):
|
||||
if material_slot.material:
|
||||
bpy.data.materials.remove(material_slot.material)
|
||||
bpy.data.meshes.remove(obj.data)
|
||||
elif obj.type == 'ARMATURE':
|
||||
objects.extend(obj.children)
|
||||
bpy.data.armatures.remove(obj.data)
|
||||
elif obj.type == 'CURVE':
|
||||
bpy.data.curves.remove(obj.data)
|
||||
elif obj.type == 'EMPTY':
|
||||
objects.extend(obj.children)
|
||||
bpy.data.objects.remove(obj)
|
||||
|
||||
def _remove_asset_and_library(self, asset_group):
|
||||
libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
|
||||
|
||||
# Check how many assets use the same library
|
||||
count = 0
|
||||
for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects:
|
||||
if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
|
||||
count += 1
|
||||
|
||||
self._remove(asset_group)
|
||||
|
||||
bpy.data.objects.remove(asset_group)
|
||||
|
||||
# If it is the last object to use that library, remove it
|
||||
if count == 1:
|
||||
library = bpy.data.libraries.get(bpy.path.basename(libpath))
|
||||
bpy.data.libraries.remove(library)
|
||||
|
||||
def _process(self, libpath, asset_group, group_name, actions):
|
||||
with bpy.data.libraries.load(
|
||||
libpath, link=True, relative=False
|
||||
) as (data_from, data_to):
|
||||
data_to.objects = data_from.objects
|
||||
|
||||
parent = bpy.context.scene.collection
|
||||
|
||||
empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']
|
||||
|
||||
container = None
|
||||
|
||||
for empty in empties:
|
||||
if empty.get(AVALON_PROPERTY):
|
||||
container = empty
|
||||
break
|
||||
|
||||
assert container, "No asset group found"
|
||||
|
||||
# Children must be linked before parents,
|
||||
# otherwise the hierarchy will break
|
||||
objects = []
|
||||
nodes = list(container.children)
|
||||
|
||||
for obj in nodes:
|
||||
obj.parent = asset_group
|
||||
|
||||
for obj in nodes:
|
||||
objects.append(obj)
|
||||
nodes.extend(list(obj.children))
|
||||
|
||||
objects.reverse()
|
||||
|
||||
constraints = []
|
||||
|
||||
armatures = [obj for obj in objects if obj.type == 'ARMATURE']
|
||||
|
||||
for armature in armatures:
|
||||
for bone in armature.pose.bones:
|
||||
for constraint in bone.constraints:
|
||||
if hasattr(constraint, 'target'):
|
||||
constraints.append(constraint)
|
||||
|
||||
for obj in objects:
|
||||
parent.objects.link(obj)
|
||||
|
||||
for obj in objects:
|
||||
local_obj = plugin.prepare_data(obj, group_name)
|
||||
|
||||
action = None
|
||||
|
||||
if actions:
|
||||
action = actions.get(local_obj.name, None)
|
||||
|
||||
if local_obj.type == 'MESH':
|
||||
plugin.prepare_data(local_obj.data, group_name)
|
||||
|
||||
if obj != local_obj:
|
||||
for constraint in constraints:
|
||||
if constraint.target == obj:
|
||||
constraint.target = local_obj
|
||||
|
||||
for material_slot in local_obj.material_slots:
|
||||
if material_slot.material:
|
||||
plugin.prepare_data(material_slot.material, group_name)
|
||||
elif local_obj.type == 'ARMATURE':
|
||||
plugin.prepare_data(local_obj.data, group_name)
|
||||
|
||||
if action is not None:
|
||||
local_obj.animation_data.action = action
|
||||
elif local_obj.animation_data.action is not None:
|
||||
plugin.prepare_data(
|
||||
local_obj.animation_data.action, group_name)
|
||||
|
||||
# Set link the drivers to the local object
|
||||
if local_obj.data.animation_data:
|
||||
for d in local_obj.data.animation_data.drivers:
|
||||
for v in d.driver.variables:
|
||||
for t in v.targets:
|
||||
t.id = local_obj
|
||||
|
||||
if not local_obj.get(AVALON_PROPERTY):
|
||||
local_obj[AVALON_PROPERTY] = dict()
|
||||
|
||||
avalon_info = local_obj[AVALON_PROPERTY]
|
||||
avalon_info.update({"container_name": group_name})
|
||||
|
||||
objects.reverse()
|
||||
|
||||
bpy.data.orphans_purge(do_local_ids=False)
|
||||
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
return objects
|
||||
|
||||
def process_asset(
|
||||
self, context: dict, name: str, namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None
|
||||
) -> Optional[List]:
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
|
||||
asset_name = plugin.asset_name(asset, subset)
|
||||
unique_number = plugin.get_unique_number(asset, subset)
|
||||
group_name = plugin.asset_name(asset, subset, unique_number)
|
||||
namespace = namespace or f"{asset}_{unique_number}"
|
||||
|
||||
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
|
||||
if not avalon_container:
|
||||
avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
|
||||
bpy.context.scene.collection.children.link(avalon_container)
|
||||
|
||||
asset_group = bpy.data.objects.new(group_name, object_data=None)
|
||||
asset_group.empty_display_type = 'SINGLE_ARROW'
|
||||
avalon_container.objects.link(asset_group)
|
||||
|
||||
objects = self._process(libpath, asset_group, group_name, None)
|
||||
|
||||
for child in asset_group.children:
|
||||
if child.get(AVALON_PROPERTY):
|
||||
avalon_container.objects.link(child)
|
||||
|
||||
bpy.context.scene.collection.objects.link(asset_group)
|
||||
|
||||
asset_group[AVALON_PROPERTY] = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": name,
|
||||
"namespace": namespace or '',
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": str(context["representation"]["_id"]),
|
||||
"libpath": libpath,
|
||||
"asset_name": asset_name,
|
||||
"parent": str(context["representation"]["parent"]),
|
||||
"family": context["representation"]["context"]["family"],
|
||||
"objectName": group_name
|
||||
}
|
||||
|
||||
self[:] = objects
|
||||
return objects
|
||||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
self.log.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert asset_group, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
metadata = asset_group.get(AVALON_PROPERTY)
|
||||
group_libpath = metadata["libpath"]
|
||||
|
||||
normalized_group_libpath = (
|
||||
str(Path(bpy.path.abspath(group_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_group_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_group_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
actions = {}
|
||||
|
||||
for obj in asset_group.children:
|
||||
obj_meta = obj.get(AVALON_PROPERTY)
|
||||
if obj_meta.get('family') == 'rig':
|
||||
rig = None
|
||||
for child in obj.children:
|
||||
if child.type == 'ARMATURE':
|
||||
rig = child
|
||||
break
|
||||
if not rig:
|
||||
raise Exception("No armature in the rig asset group.")
|
||||
if rig.animation_data and rig.animation_data.action:
|
||||
instance_name = obj_meta.get('instance_name')
|
||||
actions[instance_name] = rig.animation_data.action
|
||||
|
||||
mat = asset_group.matrix_basis.copy()
|
||||
|
||||
# Remove the children of the asset_group first
|
||||
for child in list(asset_group.children):
|
||||
self._remove_asset_and_library(child)
|
||||
|
||||
# Check how many assets use the same library
|
||||
count = 0
|
||||
for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
|
||||
if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
|
||||
count += 1
|
||||
|
||||
self._remove(asset_group)
|
||||
|
||||
# If it is the last object to use that library, remove it
|
||||
if count == 1:
|
||||
library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
|
||||
bpy.data.libraries.remove(library)
|
||||
|
||||
self._process(str(libpath), asset_group, object_name, actions)
|
||||
|
||||
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
|
||||
for child in asset_group.children:
|
||||
if child.get(AVALON_PROPERTY):
|
||||
avalon_container.objects.link(child)
|
||||
|
||||
asset_group.matrix_basis = mat
|
||||
|
||||
metadata["libpath"] = str(libpath)
|
||||
metadata["representation"] = str(representation["_id"])
|
||||
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (openpype:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
|
||||
if not asset_group:
|
||||
return False
|
||||
|
||||
# Remove the children of the asset_group first
|
||||
for child in list(asset_group.children):
|
||||
self._remove_asset_and_library(child)
|
||||
|
||||
self._remove_asset_and_library(asset_group)
|
||||
|
||||
return True
|
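The update and remove paths above count how many loaded containers still point at a library before dropping it from `bpy.data.libraries`. A minimal sketch of inspecting that usage in a live scene, using the same metadata keys as the loader (purely illustrative, not part of the commit):

import bpy

from avalon.blender.pipeline import AVALON_CONTAINERS, AVALON_PROPERTY

containers = bpy.data.collections.get(AVALON_CONTAINERS)
usage = {}
for obj in containers.all_objects:
    meta = obj.get(AVALON_PROPERTY) or {}
    libpath = meta.get('libpath')
    if libpath:
        usage[libpath] = usage.get(libpath, 0) + 1

for libpath, count in usage.items():
    # A count of 1 means removing that container should also remove the
    # library, mirroring _remove_asset_and_library above.
    print(libpath, "->", count, "container(s)")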
259
openpype/hosts/blender/plugins/load/load_layout_json.py
Normal file
@ -0,0 +1,259 @@
"""Load a layout in Blender."""
|
||||
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, Optional
|
||||
|
||||
import bpy
|
||||
import json
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api import plugin
|
||||
|
||||
|
||||
class JsonLayoutLoader(plugin.AssetLoader):
|
||||
"""Load layout published from Unreal."""
|
||||
|
||||
families = ["layout"]
|
||||
representations = ["json"]
|
||||
|
||||
label = "Load Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
animation_creator_name = "CreateAnimation"
|
||||
|
||||
def _remove(self, asset_group):
|
||||
objects = list(asset_group.children)
|
||||
|
||||
for obj in objects:
|
||||
api.remove(obj.get(AVALON_PROPERTY))
|
||||
|
||||
def _remove_animation_instances(self, asset_group):
|
||||
instances = bpy.data.collections.get(AVALON_INSTANCES)
|
||||
if instances:
|
||||
for obj in list(asset_group.children):
|
||||
anim_collection = instances.children.get(
|
||||
obj.name + "_animation")
|
||||
if anim_collection:
|
||||
bpy.data.collections.remove(anim_collection)
|
||||
|
||||
def _get_loader(self, loaders, family):
|
||||
name = ""
|
||||
if family == 'rig':
|
||||
name = "BlendRigLoader"
|
||||
elif family == 'model':
|
||||
name = "BlendModelLoader"
|
||||
|
||||
if name == "":
|
||||
return None
|
||||
|
||||
for loader in loaders:
|
||||
if loader.__name__ == name:
|
||||
return loader
|
||||
|
||||
return None
|
||||
|
||||
def _process(self, libpath, asset, asset_group, actions):
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
with open(libpath, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
all_loaders = api.discover(api.Loader)
|
||||
|
||||
for element in data:
|
||||
reference = element.get('reference')
|
||||
family = element.get('family')
|
||||
|
||||
loaders = api.loaders_from_representation(all_loaders, reference)
|
||||
loader = self._get_loader(loaders, family)
|
||||
|
||||
if not loader:
|
||||
continue
|
||||
|
||||
instance_name = element.get('instance_name')
|
||||
|
||||
action = None
|
||||
|
||||
if actions:
|
||||
action = actions.get(instance_name, None)
|
||||
|
||||
options = {
|
||||
'parent': asset_group,
|
||||
'transform': element.get('transform'),
|
||||
'action': action,
|
||||
'create_animation': True if family == 'rig' else False,
|
||||
'animation_asset': asset
|
||||
}
|
||||
|
||||
# This should return the loaded asset, but the load call will be
|
||||
# added to the queue to run in the Blender main thread, so
|
||||
# at this time it will not return anything. The assets will be
|
||||
# loaded in the next Blender cycle, so we use the options to
|
||||
# set the transform, parent and assign the action, if there is one.
|
||||
api.load(
|
||||
loader,
|
||||
reference,
|
||||
namespace=instance_name,
|
||||
options=options
|
||||
)
|
||||
|
||||
def process_asset(self,
|
||||
context: dict,
|
||||
name: str,
|
||||
namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None):
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
|
||||
asset_name = plugin.asset_name(asset, subset)
|
||||
unique_number = plugin.get_unique_number(asset, subset)
|
||||
group_name = plugin.asset_name(asset, subset, unique_number)
|
||||
namespace = namespace or f"{asset}_{unique_number}"
|
||||
|
||||
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
|
||||
if not avalon_container:
|
||||
avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
|
||||
bpy.context.scene.collection.children.link(avalon_container)
|
||||
|
||||
asset_group = bpy.data.objects.new(group_name, object_data=None)
|
||||
asset_group.empty_display_type = 'SINGLE_ARROW'
|
||||
avalon_container.objects.link(asset_group)
|
||||
|
||||
self._process(libpath, asset, asset_group, None)
|
||||
|
||||
bpy.context.scene.collection.objects.link(asset_group)
|
||||
|
||||
asset_group[AVALON_PROPERTY] = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": name,
|
||||
"namespace": namespace or '',
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": str(context["representation"]["_id"]),
|
||||
"libpath": libpath,
|
||||
"asset_name": asset_name,
|
||||
"parent": str(context["representation"]["parent"]),
|
||||
"family": context["representation"]["context"]["family"],
|
||||
"objectName": group_name
|
||||
}
|
||||
|
||||
self[:] = asset_group.children
|
||||
return asset_group.children
|
||||
|
||||
def exec_update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
"""
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
self.log.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert asset_group, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
metadata = asset_group.get(AVALON_PROPERTY)
|
||||
group_libpath = metadata["libpath"]
|
||||
|
||||
normalized_group_libpath = (
|
||||
str(Path(bpy.path.abspath(group_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
self.log.debug(
|
||||
"normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_group_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_group_libpath == normalized_libpath:
|
||||
self.log.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
actions = {}
|
||||
|
||||
for obj in asset_group.children:
|
||||
obj_meta = obj.get(AVALON_PROPERTY)
|
||||
if obj_meta.get('family') == 'rig':
|
||||
rig = None
|
||||
for child in obj.children:
|
||||
if child.type == 'ARMATURE':
|
||||
rig = child
|
||||
break
|
||||
if not rig:
|
||||
raise Exception("No armature in the rig asset group.")
|
||||
if rig.animation_data and rig.animation_data.action:
|
||||
namespace = obj_meta.get('namespace')
|
||||
actions[namespace] = rig.animation_data.action
|
||||
|
||||
mat = asset_group.matrix_basis.copy()
|
||||
|
||||
self._remove_animation_instances(asset_group)
|
||||
|
||||
self._remove(asset_group)
|
||||
|
||||
self._process(str(libpath), asset_group, actions)
|
||||
|
||||
asset_group.matrix_basis = mat
|
||||
|
||||
metadata["libpath"] = str(libpath)
|
||||
metadata["representation"] = str(representation["_id"])
|
||||
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (openpype:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
"""
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
|
||||
if not asset_group:
|
||||
return False
|
||||
|
||||
self._remove_animation_instances(asset_group)
|
||||
|
||||
self._remove(asset_group)
|
||||
|
||||
bpy.data.objects.remove(asset_group)
|
||||
|
||||
return True
|
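For reference, `_process` above walks a list of layout elements and reads the `reference`, `family`, `instance_name` and `transform` keys from each. A minimal sketch of one entry in the published layout JSON, with placeholder values (the field names are taken from the code; the id and names are invented):

[
    {
        "reference": "<representation id>",
        "family": "rig",
        "instance_name": "character_01",
        "transform": {
            "translation": {"x": 0.0, "y": 0.0, "z": 0.0},
            "rotation": {"x": 0.0, "y": 0.0, "z": 0.0},
            "scale": {"x": 1.0, "y": 1.0, "z": 1.0}
        }
    }
]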
@ -1,13 +1,16 @@
"""Load a model asset in Blender."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from avalon import api, blender
|
||||
import bpy
|
||||
import openpype.hosts.blender.api.plugin as plugin
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
|
||||
|
||||
class BlendModelLoader(plugin.AssetLoader):
|
||||
|
|
@ -24,52 +27,75 @@ class BlendModelLoader(plugin.AssetLoader):
    icon = "code-fork"
    color = "orange"

    def _remove(self, objects, container):
        for obj in list(objects):
            for material_slot in list(obj.material_slots):
                bpy.data.materials.remove(material_slot.material)
            bpy.data.meshes.remove(obj.data)
    def _remove(self, asset_group):
        objects = list(asset_group.children)

        bpy.data.collections.remove(container)
        for obj in objects:
            if obj.type == 'MESH':
                for material_slot in list(obj.material_slots):
                    bpy.data.materials.remove(material_slot.material)
                bpy.data.meshes.remove(obj.data)
            elif obj.type == 'EMPTY':
                objects.extend(obj.children)
                bpy.data.objects.remove(obj)

    def _process(
        self, libpath, lib_container, container_name,
        parent_collection
    ):
        relative = bpy.context.preferences.filepaths.use_relative_paths
    def _process(self, libpath, asset_group, group_name):
        with bpy.data.libraries.load(
            libpath, link=True, relative=relative
        ) as (_, data_to):
            data_to.collections = [lib_container]
            libpath, link=True, relative=False
        ) as (data_from, data_to):
            data_to.objects = data_from.objects

        parent = parent_collection
        parent = bpy.context.scene.collection

        if parent is None:
            parent = bpy.context.scene.collection
        empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']

        parent.children.link(bpy.data.collections[lib_container])
        container = None

        model_container = parent.children[lib_container].make_local()
        model_container.name = container_name
        for empty in empties:
            if empty.get(AVALON_PROPERTY):
                container = empty
                break

        for obj in model_container.objects:
            local_obj = plugin.prepare_data(obj, container_name)
            plugin.prepare_data(local_obj.data, container_name)
        assert container, "No asset group found"

            for material_slot in local_obj.material_slots:
                plugin.prepare_data(material_slot.material, container_name)
        # Children must be linked before parents,
        # otherwise the hierarchy will break
        objects = []
        nodes = list(container.children)

            if not obj.get(blender.pipeline.AVALON_PROPERTY):
                local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
        for obj in nodes:
            obj.parent = asset_group

            avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
            avalon_info.update({"container_name": container_name})
        for obj in nodes:
            objects.append(obj)
            nodes.extend(list(obj.children))

        model_container.pop(blender.pipeline.AVALON_PROPERTY)
        objects.reverse()

        for obj in objects:
            parent.objects.link(obj)

        for obj in objects:
            local_obj = plugin.prepare_data(obj, group_name)
            if local_obj.type != 'EMPTY':
                plugin.prepare_data(local_obj.data, group_name)

                for material_slot in local_obj.material_slots:
                    plugin.prepare_data(material_slot.material, group_name)

            if not local_obj.get(AVALON_PROPERTY):
                local_obj[AVALON_PROPERTY] = dict()

            avalon_info = local_obj[AVALON_PROPERTY]
            avalon_info.update({"container_name": group_name})

        objects.reverse()

        bpy.data.orphans_purge(do_local_ids=False)

        bpy.ops.object.select_all(action='DESELECT')

        return model_container
        return objects

    def process_asset(
        self, context: dict, name: str, namespace: Optional[str] = None,
@ -82,54 +108,80 @@ class BlendModelLoader(plugin.AssetLoader):
            context: Full parenthood of representation to load
            options: Additional settings dictionary
        """

        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]

        lib_container = plugin.asset_name(
            asset, subset
        )
        unique_number = plugin.get_unique_number(
            asset, subset
        )
        asset_name = plugin.asset_name(asset, subset)
        unique_number = plugin.get_unique_number(asset, subset)
        group_name = plugin.asset_name(asset, subset, unique_number)
        namespace = namespace or f"{asset}_{unique_number}"
        container_name = plugin.asset_name(
            asset, subset, unique_number
        )

        container = bpy.data.collections.new(lib_container)
        container.name = container_name
        blender.pipeline.containerise_existing(
            container,
            name,
            namespace,
            context,
            self.__class__.__name__,
        )
        avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
        if not avalon_container:
            avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
            bpy.context.scene.collection.children.link(avalon_container)

        metadata = container.get(blender.pipeline.AVALON_PROPERTY)
        asset_group = bpy.data.objects.new(group_name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        avalon_container.objects.link(asset_group)

        metadata["libpath"] = libpath
        metadata["lib_container"] = lib_container
        bpy.ops.object.select_all(action='DESELECT')

        obj_container = self._process(
            libpath, lib_container, container_name, None)
        if options is not None:
            parent = options.get('parent')
            transform = options.get('transform')

        metadata["obj_container"] = obj_container
            if parent and transform:
                location = transform.get('translation')
                rotation = transform.get('rotation')
                scale = transform.get('scale')

        # Save the list of objects in the metadata container
        metadata["objects"] = obj_container.all_objects
                asset_group.location = (
                    location.get('x'),
                    location.get('y'),
                    location.get('z')
                )
                asset_group.rotation_euler = (
                    rotation.get('x'),
                    rotation.get('y'),
                    rotation.get('z')
                )
                asset_group.scale = (
                    scale.get('x'),
                    scale.get('y'),
                    scale.get('z')
                )

        metadata["parent"] = str(context["representation"]["parent"])
        metadata["family"] = context["representation"]["context"]["family"]
                bpy.context.view_layer.objects.active = parent
                asset_group.select_set(True)

        nodes = list(container.objects)
        nodes.append(container)
        self[:] = nodes
        return nodes
                bpy.ops.object.parent_set(keep_transform=True)

    def update(self, container: Dict, representation: Dict):
                bpy.ops.object.select_all(action='DESELECT')

        objects = self._process(libpath, asset_group, group_name)

        bpy.context.scene.collection.objects.link(asset_group)

        asset_group[AVALON_PROPERTY] = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": name,
            "namespace": namespace or '',
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
            "libpath": libpath,
            "asset_name": asset_name,
            "parent": str(context["representation"]["parent"]),
            "family": context["representation"]["context"]["family"],
            "objectName": group_name
        }

        self[:] = objects
        return objects

    def exec_update(self, container: Dict, representation: Dict):
        """Update the loaded asset.

        This will remove all objects of the current collection, load the new
@ -137,13 +189,9 @@ class BlendModelLoader(plugin.AssetLoader):
        If the objects of the collection are used in another collection they
        will not be removed, only unlinked. Normally this should not be the
        case though.

        Warning:
            No nested collections are supported at the moment!
        """
        collection = bpy.data.collections.get(
            container["objectName"]
        )
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)
        libpath = Path(api.get_representation_path(representation))
        extension = libpath.suffix.lower()

@ -153,12 +201,9 @@ class BlendModelLoader(plugin.AssetLoader):
            pformat(representation, indent=2),
        )

        assert collection, (
        assert asset_group, (
            f"The asset is not loaded: {container['objectName']}"
        )
        assert not (collection.children), (
            "Nested collections are not supported."
        )
        assert libpath, (
            "No existing library file found for {container['objectName']}"
        )

@ -169,47 +214,47 @@ class BlendModelLoader(plugin.AssetLoader):
            f"Unsupported file: {libpath}"
        )

        collection_metadata = collection.get(
            blender.pipeline.AVALON_PROPERTY)
        collection_libpath = collection_metadata["libpath"]
        lib_container = collection_metadata["lib_container"]
        metadata = asset_group.get(AVALON_PROPERTY)
        group_libpath = metadata["libpath"]

        obj_container = plugin.get_local_collection_with_name(
            collection_metadata["obj_container"].name
        )
        objects = obj_container.all_objects

        container_name = obj_container.name

        normalized_collection_libpath = (
            str(Path(bpy.path.abspath(collection_libpath)).resolve())
        normalized_group_libpath = (
            str(Path(bpy.path.abspath(group_libpath)).resolve())
        )
        normalized_libpath = (
            str(Path(bpy.path.abspath(str(libpath))).resolve())
        )
        self.log.debug(
            "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
            normalized_collection_libpath,
            "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
            normalized_group_libpath,
            normalized_libpath,
        )
        if normalized_collection_libpath == normalized_libpath:
        if normalized_group_libpath == normalized_libpath:
            self.log.info("Library already loaded, not updating...")
            return

        parent = plugin.get_parent_collection(obj_container)
        # Check how many assets use the same library
        count = 0
        for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
            if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
                count += 1

        self._remove(objects, obj_container)
        mat = asset_group.matrix_basis.copy()

        obj_container = self._process(
            str(libpath), lib_container, container_name, parent)
        self._remove(asset_group)

        # Save the list of objects in the metadata container
        collection_metadata["obj_container"] = obj_container
        collection_metadata["objects"] = obj_container.all_objects
        collection_metadata["libpath"] = str(libpath)
        collection_metadata["representation"] = str(representation["_id"])
        # If it is the last object to use that library, remove it
        if count == 1:
            library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
            bpy.data.libraries.remove(library)

    def remove(self, container: Dict) -> bool:
        self._process(str(libpath), asset_group, object_name)

        asset_group.matrix_basis = mat

        metadata["libpath"] = str(libpath)
        metadata["representation"] = str(representation["_id"])

    def exec_remove(self, container: Dict) -> bool:
        """Remove an existing container from a Blender scene.

        Arguments:
@ -218,29 +263,27 @@ class BlendModelLoader(plugin.AssetLoader):
|
|||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
if not collection:
|
||||
object_name = container["objectName"]
|
||||
asset_group = bpy.data.objects.get(object_name)
|
||||
libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
|
||||
|
||||
# Check how many assets use the same library
|
||||
count = 0
|
||||
for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
|
||||
if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
|
||||
count += 1
|
||||
|
||||
if not asset_group:
|
||||
return False
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
self._remove(asset_group)
|
||||
|
||||
obj_container = plugin.get_local_collection_with_name(
|
||||
collection_metadata["obj_container"].name
|
||||
)
|
||||
objects = obj_container.all_objects
|
||||
bpy.data.objects.remove(asset_group)
|
||||
|
||||
self._remove(objects, obj_container)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
# If it is the last object to use that library, remove it
|
||||
if count == 1:
|
||||
library = bpy.data.libraries.get(bpy.path.basename(libpath))
|
||||
bpy.data.libraries.remove(library)
|
||||
|
||||
return True
|
||||
|
|
|
|||
|
|
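The loaders in this change all share one linking pattern: link every object from the published .blend, then find the container by its avalon custom property on an empty. A minimal standalone sketch of that pattern, runnable from Blender's Python console (the property key and file path below are illustrative assumptions, not the pipeline's real values):

import bpy

AVALON_PROPERTY = "avalon"        # assumed custom-property key
libpath = "/path/to/asset.blend"  # hypothetical library file

# Link every object from the library into the current file.
with bpy.data.libraries.load(libpath, link=True, relative=False) as (
        data_from, data_to):
    data_to.objects = data_from.objects

# The container is the empty that carries the avalon metadata.
container = next(
    (obj for obj in data_to.objects
     if obj and obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY)),
    None)
assert container, "No asset group found"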
@ -1,21 +1,21 @@
"""Load a rig asset in Blender."""

import logging
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

-from avalon import api, blender
-import openpype.hosts.blender.api.plugin as plugin
import bpy

from avalon import api
from avalon.blender.pipeline import AVALON_CONTAINERS
from avalon.blender.pipeline import AVALON_CONTAINER_ID
from avalon.blender.pipeline import AVALON_PROPERTY
from openpype import lib
from openpype.hosts.blender.api import plugin


class BlendRigLoader(plugin.AssetLoader):
-    """Load rigs from a .blend file.
-
-    Because they come from a .blend file we can simply link the collection that
-    contains the model. There is no further need to 'containerise' it.
-    """
    """Load rigs from a .blend file."""

    families = ["rig"]
    representations = ["blend"]

@ -24,105 +24,113 @@ class BlendRigLoader(plugin.AssetLoader):
    icon = "code-fork"
    color = "orange"

-    def _remove(self, objects, obj_container):
-        for obj in list(objects):
-            if obj.type == 'ARMATURE':
-                bpy.data.armatures.remove(obj.data)
-            elif obj.type == 'MESH':
-                bpy.data.meshes.remove(obj.data)
-            elif obj.type == 'CURVE':
-                bpy.data.curves.remove(obj.data)
-
-        for child in obj_container.children:
-            bpy.data.collections.remove(child)
-
-        bpy.data.collections.remove(obj_container)
-
-    def make_local_and_metadata(self, obj, collection_name):
-        local_obj = plugin.prepare_data(obj, collection_name)
-        plugin.prepare_data(local_obj.data, collection_name)
-
-        if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
-            local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
-
-        avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
-        avalon_info.update({"container_name": collection_name + '_CON'})
-
-        return local_obj
-
    def _remove(self, asset_group):
        objects = list(asset_group.children)

        for obj in objects:
            if obj.type == 'MESH':
                for material_slot in list(obj.material_slots):
                    if material_slot.material:
                        bpy.data.materials.remove(material_slot.material)
                bpy.data.meshes.remove(obj.data)
            elif obj.type == 'ARMATURE':
                objects.extend(obj.children)
                bpy.data.armatures.remove(obj.data)
            elif obj.type == 'CURVE':
                bpy.data.curves.remove(obj.data)
            elif obj.type == 'EMPTY':
                objects.extend(obj.children)
                bpy.data.objects.remove(obj)

-    def _process(
-        self, libpath, lib_container, collection_name,
-        action, parent_collection
-    ):
-        relative = bpy.context.preferences.filepaths.use_relative_paths
-        with bpy.data.libraries.load(
-            libpath, link=True, relative=relative
-        ) as (_, data_to):
-            data_to.collections = [lib_container]
-
-        parent = parent_collection
-
-        if parent is None:
-            parent = bpy.context.scene.collection
-
-        parent.children.link(bpy.data.collections[lib_container])
-
-        rig_container = parent.children[lib_container].make_local()
-        rig_container.name = collection_name
-
    def _process(self, libpath, asset_group, group_name, action):
        with bpy.data.libraries.load(
            libpath, link=True, relative=False
        ) as (data_from, data_to):
            data_to.objects = data_from.objects

        parent = bpy.context.scene.collection

        empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']

        container = None

        for empty in empties:
            if empty.get(AVALON_PROPERTY):
                container = empty
                break

        assert container, "No asset group found"

        # Children must be linked before parents,
        # otherwise the hierarchy will break
        objects = []
-        armatures = [
-            obj for obj in rig_container.objects
-            if obj.type == 'ARMATURE'
-        ]
-
-        for child in rig_container.children:
-            local_child = plugin.prepare_data(child, collection_name)
-            objects.extend(local_child.objects)
-
-        # for obj in bpy.data.objects:
-        #     obj.select_set(False)
        nodes = list(container.children)

        for obj in nodes:
            obj.parent = asset_group

        for obj in nodes:
            objects.append(obj)
            nodes.extend(list(obj.children))

        objects.reverse()

        constraints = []

        armatures = [obj for obj in objects if obj.type == 'ARMATURE']

        for armature in armatures:
            for bone in armature.pose.bones:
                for constraint in bone.constraints:
                    if hasattr(constraint, 'target'):
                        constraints.append(constraint)

-        # Link armatures after other objects.
-        # The armature is unparented for all the non-local meshes,
-        # when it is made local.
-        for obj in objects:
-            local_obj = self.make_local_and_metadata(obj, collection_name)
-
-            if obj != local_obj:
-                for constraint in constraints:
-                    if constraint.target == obj:
-                        constraint.target = local_obj
-
-        for armature in armatures:
-            local_obj = self.make_local_and_metadata(armature, collection_name)
-
-            if action is not None:
-                local_obj.animation_data.action = action
-            elif local_obj.animation_data.action is not None:
-                plugin.prepare_data(
-                    local_obj.animation_data.action, collection_name)
-
-            # Link the drivers to the local object
-            if local_obj.data.animation_data:
-                for d in local_obj.data.animation_data.drivers:
-                    for v in d.driver.variables:
-                        for t in v.targets:
-                            t.id = local_obj
-
-        rig_container.pop(blender.pipeline.AVALON_PROPERTY)
        for obj in objects:
            parent.objects.link(obj)

        for obj in objects:
            local_obj = plugin.prepare_data(obj, group_name)

            if local_obj.type == 'MESH':
                plugin.prepare_data(local_obj.data, group_name)

                if obj != local_obj:
                    for constraint in constraints:
                        if constraint.target == obj:
                            constraint.target = local_obj

                for material_slot in local_obj.material_slots:
                    if material_slot.material:
                        plugin.prepare_data(material_slot.material, group_name)
            elif local_obj.type == 'ARMATURE':
                plugin.prepare_data(local_obj.data, group_name)

                if action is not None:
                    local_obj.animation_data.action = action
                elif local_obj.animation_data.action is not None:
                    plugin.prepare_data(
                        local_obj.animation_data.action, group_name)

                # Link the drivers to the local object
                if local_obj.data.animation_data:
                    for d in local_obj.data.animation_data.drivers:
                        for v in d.driver.variables:
                            for t in v.targets:
                                t.id = local_obj

            if not local_obj.get(AVALON_PROPERTY):
                local_obj[AVALON_PROPERTY] = dict()

            avalon_info = local_obj[AVALON_PROPERTY]
            avalon_info.update({"container_name": group_name})

        objects.reverse()

        bpy.data.orphans_purge(do_local_ids=False)

        bpy.ops.object.select_all(action='DESELECT')

-        return rig_container
        return objects

    def process_asset(
        self, context: dict, name: str, namespace: Optional[str] = None,

@ -138,61 +146,111 @@ class BlendRigLoader(plugin.AssetLoader):
        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
-        lib_container = plugin.asset_name(
-            asset, subset
-        )
-        unique_number = plugin.get_unique_number(
-            asset, subset
-        )
        asset_name = plugin.asset_name(asset, subset)
        unique_number = plugin.get_unique_number(asset, subset)
        group_name = plugin.asset_name(asset, subset, unique_number)
        namespace = namespace or f"{asset}_{unique_number}"
-        collection_name = plugin.asset_name(
-            asset, subset, unique_number
-        )
-
-        container = bpy.data.collections.new(collection_name)
-        blender.pipeline.containerise_existing(
-            container,
-            name,
-            namespace,
-            context,
-            self.__class__.__name__,
-        )
-
-        metadata = container.get(blender.pipeline.AVALON_PROPERTY)
-
-        metadata["libpath"] = libpath
-        metadata["lib_container"] = lib_container
-
-        obj_container = self._process(
-            libpath, lib_container, collection_name, None, None)
-
-        metadata["obj_container"] = obj_container
-        # Save the list of objects in the metadata container
-        metadata["objects"] = obj_container.all_objects
-
-        metadata["parent"] = str(context["representation"]["parent"])
-        metadata["family"] = context["representation"]["context"]["family"]
-
-        nodes = list(container.objects)
-        nodes.append(container)
-        self[:] = nodes
-        return nodes
-
-    def update(self, container: Dict, representation: Dict):

        avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
        if not avalon_container:
            avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
            bpy.context.scene.collection.children.link(avalon_container)

        asset_group = bpy.data.objects.new(group_name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        avalon_container.objects.link(asset_group)

        action = None

        bpy.ops.object.select_all(action='DESELECT')

        create_animation = False

        if options is not None:
            parent = options.get('parent')
            transform = options.get('transform')
            action = options.get('action')
            create_animation = options.get('create_animation')

            if parent and transform:
                location = transform.get('translation')
                rotation = transform.get('rotation')
                scale = transform.get('scale')

                asset_group.location = (
                    location.get('x'),
                    location.get('y'),
                    location.get('z')
                )
                asset_group.rotation_euler = (
                    rotation.get('x'),
                    rotation.get('y'),
                    rotation.get('z')
                )
                asset_group.scale = (
                    scale.get('x'),
                    scale.get('y'),
                    scale.get('z')
                )

                bpy.context.view_layer.objects.active = parent
                asset_group.select_set(True)

                bpy.ops.object.parent_set(keep_transform=True)

                bpy.ops.object.select_all(action='DESELECT')

        objects = self._process(libpath, asset_group, group_name, action)

        if create_animation:
            creator_plugin = lib.get_creator_by_name("CreateAnimation")
            if not creator_plugin:
                raise ValueError("Creator plugin \"CreateAnimation\" was "
                                 "not found.")

            asset_group.select_set(True)

            animation_asset = options.get('animation_asset')

            api.create(
                creator_plugin,
                name=namespace + "_animation",
                # name=f"{unique_number}_{subset}_animation",
                asset=animation_asset,
                options={"useSelection": False, "asset_group": asset_group},
                data={"dependencies": str(context["representation"]["_id"])}
            )

            bpy.ops.object.select_all(action='DESELECT')

        bpy.context.scene.collection.objects.link(asset_group)

        asset_group[AVALON_PROPERTY] = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": name,
            "namespace": namespace or '',
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
            "libpath": libpath,
            "asset_name": asset_name,
            "parent": str(context["representation"]["parent"]),
            "family": context["representation"]["context"]["family"],
            "objectName": group_name
        }

        self[:] = objects
        return objects

    def exec_update(self, container: Dict, representation: Dict):
        """Update the loaded asset.

-        This will remove all objects of the current collection, load the new
-        ones and add them to the collection.
-        If the objects of the collection are used in another collection they
-        will not be removed, only unlinked. Normally this should not be the
-        case though.
-
-        Warning:
-            No nested collections are supported at the moment!
        This will remove all children of the asset group, load the new ones
        and add them as children of the group.
        """
-        collection = bpy.data.collections.get(
-            container["objectName"]
-        )
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)
        libpath = Path(api.get_representation_path(representation))
        extension = libpath.suffix.lower()

@ -202,12 +260,9 @@ class BlendRigLoader(plugin.AssetLoader):
            pformat(representation, indent=2),
        )

-        assert collection, (
        assert asset_group, (
            f"The asset is not loaded: {container['objectName']}"
        )
-        assert not (collection.children), (
-            "Nested collections are not supported."
-        )
        assert libpath, (
            f"No existing library file found for {container['objectName']}"
        )

@ -218,89 +273,84 @@ class BlendRigLoader(plugin.AssetLoader):
            f"Unsupported file: {libpath}"
        )

-        collection_metadata = collection.get(
-            blender.pipeline.AVALON_PROPERTY)
-        collection_libpath = collection_metadata["libpath"]
-        lib_container = collection_metadata["lib_container"]
-
-        obj_container = plugin.get_local_collection_with_name(
-            collection_metadata["obj_container"].name
-        )
-        objects = obj_container.all_objects
-
-        container_name = obj_container.name
-
        metadata = asset_group.get(AVALON_PROPERTY)
        group_libpath = metadata["libpath"]

-        normalized_collection_libpath = (
-            str(Path(bpy.path.abspath(collection_libpath)).resolve())
-        )
        normalized_group_libpath = (
            str(Path(bpy.path.abspath(group_libpath)).resolve())
        )
        normalized_libpath = (
            str(Path(bpy.path.abspath(str(libpath))).resolve())
        )
        self.log.debug(
-            "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
-            normalized_collection_libpath,
            "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
            normalized_group_libpath,
            normalized_libpath,
        )
-        if normalized_collection_libpath == normalized_libpath:
        if normalized_group_libpath == normalized_libpath:
            self.log.info("Library already loaded, not updating...")
            return

        # Check how many assets use the same library
        count = 0
        for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
            if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
                count += 1

        # Get the armature of the rig
-        armatures = [obj for obj in objects if obj.type == 'ARMATURE']
-        assert(len(armatures) == 1)
        objects = asset_group.children
        armature = [obj for obj in objects if obj.type == 'ARMATURE'][0]

        action = None
-        if armatures[0].animation_data and armatures[0].animation_data.action:
-            action = armatures[0].animation_data.action
        if armature.animation_data and armature.animation_data.action:
            action = armature.animation_data.action

-        parent = plugin.get_parent_collection(obj_container)
-
-        self._remove(objects, obj_container)
-
-        obj_container = self._process(
-            str(libpath), lib_container, container_name, action, parent)
-
-        # Save the list of objects in the metadata container
-        collection_metadata["obj_container"] = obj_container
-        collection_metadata["objects"] = obj_container.all_objects
-        collection_metadata["libpath"] = str(libpath)
-        collection_metadata["representation"] = str(representation["_id"])
-
-        bpy.ops.object.select_all(action='DESELECT')
        mat = asset_group.matrix_basis.copy()

        self._remove(asset_group)

        # If it is the last object to use that library, remove it
        if count == 1:
            library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
            bpy.data.libraries.remove(library)

        self._process(str(libpath), asset_group, object_name, action)

        asset_group.matrix_basis = mat

        metadata["libpath"] = str(libpath)
        metadata["representation"] = str(representation["_id"])

-    def remove(self, container: Dict) -> bool:
-        """Remove an existing container from a Blender scene.
    def exec_remove(self, container: Dict) -> bool:
        """Remove an existing asset group from a Blender scene.

        Arguments:
            container (openpype:container-1.0): Container to remove,
                from `host.ls()`.

        Returns:
-            bool: Whether the container was deleted.
-
-        Warning:
-            No nested collections are supported at the moment!
            bool: Whether the asset group was deleted.
        """
-        collection = bpy.data.collections.get(
-            container["objectName"]
-        )
-        if not collection:
-            return False
-        assert not (collection.children), (
-            "Nested collections are not supported."
-        )
-
-        collection_metadata = collection.get(
-            blender.pipeline.AVALON_PROPERTY)
-
-        obj_container = plugin.get_local_collection_with_name(
-            collection_metadata["obj_container"].name
-        )
-        objects = obj_container.all_objects
-
-        self._remove(objects, obj_container)
-
-        bpy.data.collections.remove(collection)
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)
        libpath = asset_group.get(AVALON_PROPERTY).get('libpath')

        # Check how many assets use the same library
        count = 0
        for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
            if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
                count += 1

        if not asset_group:
            return False

        self._remove(asset_group)

        bpy.data.objects.remove(asset_group)

        # If it is the last object to use that library, remove it
        if count == 1:
            library = bpy.data.libraries.get(bpy.path.basename(libpath))
            bpy.data.libraries.remove(library)

        return True
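Both loaders now reference-count library users before dropping the library datablock, so shared libraries survive until the last container is gone. A minimal sketch of that bookkeeping on its own (the property key, collection name and path are illustrative assumptions):

import bpy

AVALON_PROPERTY = "avalon"                # assumed metadata key
AVALON_CONTAINERS = "AVALON_CONTAINERS"   # assumed collection name
libpath = "/path/to/asset.blend"          # hypothetical

containers = bpy.data.collections.get(AVALON_CONTAINERS)
count = sum(
    1 for obj in containers.objects
    if obj.get(AVALON_PROPERTY, {}).get('libpath') == libpath)

# Only drop the library datablock when the last user is removed.
if count == 1:
    library = bpy.data.libraries.get(bpy.path.basename(libpath))
    if library:
        bpy.data.libraries.remove(library)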
@ -5,6 +5,7 @@ import json

import pyblish.api
from avalon.blender.pipeline import AVALON_PROPERTY
from avalon.blender.pipeline import AVALON_INSTANCES


class CollectInstances(pyblish.api.ContextPlugin):

@ -14,6 +15,20 @@ class CollectInstances(pyblish.api.ContextPlugin):
    label = "Collect Instances"
    order = pyblish.api.CollectorOrder

    @staticmethod
    def get_asset_groups() -> Generator:
        """Return all asset group empties marked as a publish instance.

        An object is yielded when its avalon property identifies it
        as a 'pyblish.avalon.instance'.
        """
        instances = bpy.data.collections.get(AVALON_INSTANCES)
        for obj in instances.objects:
            avalon_prop = obj.get(AVALON_PROPERTY) or dict()
            if avalon_prop.get('id') == 'pyblish.avalon.instance':
                yield obj

    @staticmethod
    def get_collections() -> Generator:
        """Return all 'model' collections.

@ -29,8 +44,35 @@ class CollectInstances(pyblish.api.ContextPlugin):

    def process(self, context):
        """Collect the models from the current Blender scene."""
        asset_groups = self.get_asset_groups()
        collections = self.get_collections()

        for group in asset_groups:
            avalon_prop = group[AVALON_PROPERTY]
            asset = avalon_prop['asset']
            family = avalon_prop['family']
            subset = avalon_prop['subset']
            task = avalon_prop['task']
            name = f"{asset}_{subset}"
            instance = context.create_instance(
                name=name,
                family=family,
                families=[family],
                subset=subset,
                asset=asset,
                task=task,
            )
            objects = list(group.children)
            members = set()
            for obj in objects:
                objects.extend(list(obj.children))
                members.add(obj)
            members.add(group)
            instance[:] = list(members)
            self.log.debug(json.dumps(instance.data, indent=4))
            for obj in instance:
                self.log.debug(obj)

        for collection in collections:
            avalon_prop = collection[AVALON_PROPERTY]
            asset = avalon_prop['asset']

@ -47,6 +89,12 @@ class CollectInstances(pyblish.api.ContextPlugin):
                task=task,
            )
            members = list(collection.objects)
            if family == "animation":
                for obj in collection.objects:
                    if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY):
                        for child in obj.children:
                            if child.type == 'ARMATURE':
                                members.append(child)
            members.append(collection)
            instance[:] = members
            self.log.debug(json.dumps(instance.data, indent=4))
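The collector gathers an asset group's whole hierarchy by extending the very list it is iterating, so grandchildren get picked up without recursion. The same traversal, factored into a standalone sketch:

def walk_hierarchy(asset_group):
    """Yield every descendant of an object, depth-first.

    Same trick as the collector above: extend the list while
    iterating it, so children of children are visited too.
    """
    objects = list(asset_group.children)
    for obj in objects:
        objects.extend(obj.children)
        yield obj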
@ -1,12 +1,13 @@
import os

-import openpype.api
-import openpype.hosts.blender.api.plugin
from openpype import api
from openpype.hosts.blender.api import plugin
from avalon.blender.pipeline import AVALON_PROPERTY

import bpy


-class ExtractABC(openpype.api.Extractor):
class ExtractABC(api.Extractor):
    """Extract as ABC."""

    label = "Extract ABC"

@ -16,7 +17,6 @@ class ExtractABC(openpype.api.Extractor):

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = f"{instance.name}.abc"
        filepath = os.path.join(stagingdir, filename)

@ -28,57 +28,29 @@ class ExtractABC(openpype.api.Extractor):
        # Perform extraction
        self.log.info("Performing extraction..")

-        collections = [
-            obj for obj in instance if type(obj) is bpy.types.Collection]
-
-        assert len(collections) == 1, "There should be one and only one " \
-            "collection collected for this asset"
-
-        old_active_layer_collection = view_layer.active_layer_collection
-
-        layers = view_layer.layer_collection.children
-
-        # Get the layer collection from the collection we need to export.
-        # This is needed because in Blender you can only set the active
-        # collection with the layer collection, and there is no way to get
-        # the layer collection from the collection
-        # (but there is the vice versa).
-        layer_collections = [
-            layer for layer in layers if layer.collection == collections[0]]
-
-        assert len(layer_collections) == 1
-
-        view_layer.active_layer_collection = layer_collections[0]
-
-        old_scale = scene.unit_settings.scale_length
-
        bpy.ops.object.select_all(action='DESELECT')

-        selected = list()
        selected = []
        asset_group = None

        for obj in instance:
-            try:
-                obj.select_set(True)
-                selected.append(obj)
-            except:
-                continue
            obj.select_set(True)
            selected.append(obj)
            if obj.get(AVALON_PROPERTY):
                asset_group = obj

-        new_context = openpype.hosts.blender.api.plugin.create_blender_context(
-            active=selected[0], selected=selected)
-
-        # We set the scale of the scene for the export
-        scene.unit_settings.scale_length = 0.01
        context = plugin.create_blender_context(
            active=asset_group, selected=selected)

        # We export the abc
        bpy.ops.wm.alembic_export(
-            new_context,
            context,
            filepath=filepath,
-            selected=True
            selected=True,
            flatten=False
        )

-        view_layer.active_layer_collection = old_active_layer_collection
-
-        scene.unit_settings.scale_length = old_scale
        bpy.ops.object.select_all(action='DESELECT')

        if "representations" not in instance.data:
            instance.data["representations"] = []
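The extractors now hand the export operator a temporary context override instead of flipping the active layer collection and scene scale. `plugin.create_blender_context` is OpenPype's own helper; the underlying idea can be sketched with a plain override dict, using the legacy positional-override style that this era of Blender (and the diff itself) relies on:

import bpy

def export_alembic(filepath, selected, active):
    # A context override limits the operator to our selection without
    # touching the user's real selection or active collection.
    override = bpy.context.copy()
    override["selected_objects"] = selected
    override["active_object"] = active

    bpy.ops.wm.alembic_export(
        override,
        filepath=filepath,
        selected=True,
        flatten=False,
    )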
@ -1,6 +1,8 @@
import os
-import avalon.blender.workio

import bpy

# import avalon.blender.workio
import openpype.api


@ -9,7 +11,7 @@ class ExtractBlend(openpype.api.Extractor):

    label = "Extract Blend"
    hosts = ["blender"]
-    families = ["model", "camera", "rig", "action", "layout", "animation"]
    families = ["model", "camera", "rig", "action", "layout"]
    optional = True

    def process(self, instance):

@ -22,15 +24,12 @@ class ExtractBlend(openpype.api.Extractor):
        # Perform extraction
        self.log.info("Performing extraction..")

-        # Just save the file to a temporary location. At least for now it's no
-        # problem to have (possibly) extra stuff in the file.
-        avalon.blender.workio.save_file(filepath, copy=True)
-        #
-        # # Store reference for integration
-        # if "files" not in instance.data:
-        #     instance.data["files"] = list()
-        #
-        # # instance.data["files"].append(filename)
        data_blocks = set()

        for obj in instance:
            data_blocks.add(obj)

        bpy.data.libraries.write(filepath, data_blocks)

        if "representations" not in instance.data:
            instance.data["representations"] = []
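Instead of saving a copy of the whole working file, the extractor now writes only the collected datablocks with `bpy.data.libraries.write`, which also pulls in whatever those datablocks reference. A minimal sketch of that call:

import bpy

def write_blend(filepath, objects):
    # Only the given datablocks (plus their dependencies) end up
    # in the output .blend file.
    data_blocks = set(objects)
    bpy.data.libraries.write(filepath, data_blocks)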
@ -0,0 +1,53 @@
import os

import bpy

import openpype.api


class ExtractBlendAnimation(openpype.api.Extractor):
    """Extract a blend file."""

    label = "Extract Blend"
    hosts = ["blender"]
    families = ["animation"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = f"{instance.name}.blend"
        filepath = os.path.join(stagingdir, filename)

        # Perform extraction
        self.log.info("Performing extraction..")

        data_blocks = set()

        for obj in instance:
            if isinstance(obj, bpy.types.Object) and obj.type == 'EMPTY':
                child = obj.children[0]
                if child and child.type == 'ARMATURE':
                    if not obj.animation_data:
                        obj.animation_data_create()
                    obj.animation_data.action = child.animation_data.action
                    obj.animation_data_clear()
                    data_blocks.add(child.animation_data.action)
                    data_blocks.add(obj)

        bpy.data.libraries.write(filepath, data_blocks)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'blend',
            'ext': 'blend',
            'files': filename,
            "stagingDir": stagingdir,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s",
                      instance.name, representation)
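The animation extractor carries the armature's action over to the container empty before writing, so the exported .blend ships the action together with the group. A hedged sketch of that hand-off (assumes one armature child, as the extractor does):

import bpy

def carry_action_on_empty(empty):
    """Stash the child armature's action on the empty.

    A sketch of the extractor loop above; returns the action that
    was transferred, or None if there was nothing to carry over.
    """
    child = empty.children[0] if empty.children else None
    if child and child.type == 'ARMATURE' and child.animation_data:
        if not empty.animation_data:
            empty.animation_data_create()
        empty.animation_data.action = child.animation_data.action
        return empty.animation_data.action
    return None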
@ -1,11 +1,13 @@
import os

-import openpype.api
from openpype import api
from openpype.hosts.blender.api import plugin
from avalon.blender.pipeline import AVALON_PROPERTY

import bpy


-class ExtractFBX(openpype.api.Extractor):
class ExtractFBX(api.Extractor):
    """Extract as FBX."""

    label = "Extract FBX"

@ -15,71 +17,56 @@ class ExtractFBX(openpype.api.Extractor):

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = f"{instance.name}.fbx"
        filepath = os.path.join(stagingdir, filename)

-        context = bpy.context
-        scene = context.scene
-        view_layer = context.view_layer
-
        # Perform extraction
        self.log.info("Performing extraction..")

-        collections = [
-            obj for obj in instance if type(obj) is bpy.types.Collection]
-
-        assert len(collections) == 1, "There should be one and only one " \
-            "collection collected for this asset"
-
-        old_active_layer_collection = view_layer.active_layer_collection
-
-        layers = view_layer.layer_collection.children
-
-        # Get the layer collection from the collection we need to export.
-        # This is needed because in Blender you can only set the active
-        # collection with the layer collection, and there is no way to get
-        # the layer collection from the collection
-        # (but there is the vice versa).
-        layer_collections = [
-            layer for layer in layers if layer.collection == collections[0]]
-
-        assert len(layer_collections) == 1
-
-        view_layer.active_layer_collection = layer_collections[0]
-
-        old_scale = scene.unit_settings.scale_length
-
-        # We set the scale of the scene for the export
-        scene.unit_settings.scale_length = 0.01
        bpy.ops.object.select_all(action='DESELECT')

        selected = []
        asset_group = None

        for obj in instance:
            obj.select_set(True)
            selected.append(obj)
            if obj.get(AVALON_PROPERTY):
                asset_group = obj

        context = plugin.create_blender_context(
            active=asset_group, selected=selected)

        new_materials = []
        new_materials_objs = []
        objects = list(asset_group.children)

-        for obj in collections[0].all_objects:
-            if obj.type == 'MESH':
        for obj in objects:
            objects.extend(obj.children)
            if obj.type == 'MESH' and len(obj.data.materials) == 0:
                mat = bpy.data.materials.new(obj.name)
                obj.data.materials.append(mat)
                new_materials.append(mat)
                new_materials_objs.append(obj)

        # We export the fbx
        bpy.ops.export_scene.fbx(
            context,
            filepath=filepath,
-            use_active_collection=True,
            use_active_collection=False,
            use_selection=True,
            mesh_smooth_type='FACE',
            add_leaf_bones=False
        )

-        view_layer.active_layer_collection = old_active_layer_collection
-
-        scene.unit_settings.scale_length = old_scale
        bpy.ops.object.select_all(action='DESELECT')

        for mat in new_materials:
            bpy.data.materials.remove(mat)

-        for obj in collections[0].all_objects:
-            if obj.type == 'MESH':
-                obj.data.materials.pop()
        for obj in new_materials_objs:
            obj.data.materials.pop()

        if "representations" not in instance.data:
            instance.data["representations"] = []
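The FBX extractor assigns throwaway materials to meshes that have none, exports, then removes them; tracking which objects it touched (the new `new_materials_objs` list) means only those get a slot popped afterwards. The same pattern as a standalone sketch:

import bpy

def add_placeholder_materials(objects):
    """Assign a temporary material to meshes that have none.

    Returns the created materials and the touched objects so the
    caller can clean up after exporting.
    """
    new_materials = []
    touched = []
    for obj in objects:
        if obj.type == 'MESH' and len(obj.data.materials) == 0:
            mat = bpy.data.materials.new(obj.name)
            obj.data.materials.append(mat)
            new_materials.append(mat)
            touched.append(obj)
    return new_materials, touched

def remove_placeholder_materials(new_materials, touched):
    for mat in new_materials:
        bpy.data.materials.remove(mat)
    for obj in touched:
        obj.data.materials.pop()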
@ -1,14 +1,16 @@
import os
import json

-import openpype.api
-
import bpy
import bpy_extras
import bpy_extras.anim_utils

from openpype import api
from openpype.hosts.blender.api import plugin
from avalon.blender.pipeline import AVALON_PROPERTY


-class ExtractAnimationFBX(openpype.api.Extractor):
class ExtractAnimationFBX(api.Extractor):
    """Extract as animation."""

    label = "Extract FBX"

@ -20,33 +22,26 @@ class ExtractAnimationFBX(openpype.api.Extractor):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)

-        context = bpy.context
-        scene = context.scene
-
        # Perform extraction
        self.log.info("Performing extraction..")

-        collections = [
-            obj for obj in instance if type(obj) is bpy.types.Collection]
-
-        assert len(collections) == 1, "There should be one and only one " \
-            "collection collected for this asset"
-
-        old_scale = scene.unit_settings.scale_length
-
-        # We set the scale of the scene for the export
-        scene.unit_settings.scale_length = 0.01
-
-        armatures = [
-            obj for obj in collections[0].objects if obj.type == 'ARMATURE']
-
-        assert len(collections) == 1, "There should be one and only one " \
-            "armature collected for this asset"
-
-        armature = armatures[0]
        # The first collection object in the instance is taken, as there
        # should be only one that contains the asset group.
        collection = [
            obj for obj in instance if type(obj) is bpy.types.Collection][0]

        # Again, the first object in the collection is taken, as there
        # should be only the asset group in the collection.
        asset_group = collection.objects[0]

        armature = [
            obj for obj in asset_group.children if obj.type == 'ARMATURE'][0]

        asset_group_name = asset_group.name
        asset_group.name = asset_group.get(AVALON_PROPERTY).get("asset_name")

        armature_name = armature.name
-        original_name = armature_name.split(':')[0]
        original_name = armature_name.split(':')[1]
        armature.name = original_name

        object_action_pairs = []

@ -89,27 +84,29 @@ class ExtractAnimationFBX(openpype.api.Extractor):
        for obj in bpy.data.objects:
            obj.select_set(False)

        asset_group.select_set(True)
        armature.select_set(True)
        fbx_filename = f"{instance.name}_{armature.name}.fbx"
        filepath = os.path.join(stagingdir, fbx_filename)

-        override = bpy.context.copy()
-        override['selected_objects'] = [armature]
        override = plugin.create_blender_context(
            active=asset_group, selected=[asset_group, armature])
        bpy.ops.export_scene.fbx(
            override,
            filepath=filepath,
            use_active_collection=False,
            use_selection=True,
            bake_anim_use_nla_strips=False,
            bake_anim_use_all_actions=False,
            add_leaf_bones=False,
            armature_nodetype='ROOT',
-            object_types={'ARMATURE'}
            object_types={'EMPTY', 'ARMATURE'}
        )
        armature.name = armature_name
        asset_group.name = asset_group_name
        asset_group.select_set(False)
        armature.select_set(False)

-        scene.unit_settings.scale_length = old_scale
-
        # We delete the baked action and set the original one back
        for i in range(0, len(object_action_pairs)):
            pair = object_action_pairs[i]

@ -125,18 +122,20 @@ class ExtractAnimationFBX(openpype.api.Extractor):
        json_filename = f"{instance.name}.json"
        json_path = os.path.join(stagingdir, json_filename)

-        json_dict = {}
        json_dict = {
            "instance_name": asset_group.get(AVALON_PROPERTY).get("namespace")
        }

-        collection = instance.data.get("name")
-        container = None
-        for obj in bpy.data.collections[collection].objects:
-            if obj.type == "ARMATURE":
-                container_name = obj.get("avalon").get("container_name")
-                container = bpy.data.collections[container_name]
-        if container:
-            json_dict = {
-                "instance_name": container.get("avalon").get("instance_name")
-            }
        # collection = instance.data.get("name")
        # container = None
        # for obj in bpy.data.collections[collection].objects:
        #     if obj.type == "ARMATURE":
        #         container_name = obj.get("avalon").get("container_name")
        #         container = bpy.data.collections[container_name]
        # if container:
        #     json_dict = {
        #         "instance_name": container.get("avalon").get("instance_name")
        #     }

        with open(json_path, "w+") as file:
            json.dump(json_dict, fp=file, indent=2)

@ -159,6 +158,5 @@ class ExtractAnimationFBX(openpype.api.Extractor):
        instance.data["representations"].append(fbx_representation)
        instance.data["representations"].append(json_representation)

        self.log.info("Extracted instance '{}' to: {}".format(
            instance.name, fbx_representation))
@ -3,7 +3,8 @@ import json

import bpy

-from avalon import blender, io
from avalon import io
from avalon.blender.pipeline import AVALON_PROPERTY
import openpype.api


@ -24,52 +25,49 @@ class ExtractLayout(openpype.api.Extractor):

        json_data = []

-        for collection in instance:
-            for asset in collection.children:
-                collection = bpy.data.collections[asset.name]
-                container = bpy.data.collections[asset.name + '_CON']
-                metadata = container.get(blender.pipeline.AVALON_PROPERTY)
-
-                parent = metadata["parent"]
-                family = metadata["family"]
-
-                self.log.debug("Parent: {}".format(parent))
-                blend = io.find_one(
-                    {
-                        "type": "representation",
-                        "parent": io.ObjectId(parent),
-                        "name": "blend"
-                    },
-                    projection={"_id": True})
-                blend_id = blend["_id"]
-
-                json_element = {}
-                json_element["reference"] = str(blend_id)
-                json_element["family"] = family
-                json_element["instance_name"] = asset.name
-                json_element["asset_name"] = metadata["lib_container"]
-                json_element["file_path"] = metadata["libpath"]
-
-                obj = collection.objects[0]
-
-                json_element["transform"] = {
-                    "translation": {
-                        "x": obj.location.x,
-                        "y": obj.location.y,
-                        "z": obj.location.z
-                    },
-                    "rotation": {
-                        "x": obj.rotation_euler.x,
-                        "y": obj.rotation_euler.y,
-                        "z": obj.rotation_euler.z,
-                    },
-                    "scale": {
-                        "x": obj.scale.x,
-                        "y": obj.scale.y,
-                        "z": obj.scale.z
-                    }
-                }
-                json_data.append(json_element)
        asset_group = bpy.data.objects[str(instance)]

        for asset in asset_group.children:
            metadata = asset.get(AVALON_PROPERTY)

            parent = metadata["parent"]
            family = metadata["family"]

            self.log.debug("Parent: {}".format(parent))
            blend = io.find_one(
                {
                    "type": "representation",
                    "parent": io.ObjectId(parent),
                    "name": "blend"
                },
                projection={"_id": True})
            blend_id = blend["_id"]

            json_element = {}
            json_element["reference"] = str(blend_id)
            json_element["family"] = family
            json_element["instance_name"] = asset.name
            json_element["asset_name"] = metadata["asset_name"]
            json_element["file_path"] = metadata["libpath"]

            json_element["transform"] = {
                "translation": {
                    "x": asset.location.x,
                    "y": asset.location.y,
                    "z": asset.location.z
                },
                "rotation": {
                    "x": asset.rotation_euler.x,
                    "y": asset.rotation_euler.y,
                    "z": asset.rotation_euler.z,
                },
                "scale": {
                    "x": asset.scale.x,
                    "y": asset.scale.y,
                    "z": asset.scale.z
                }
            }
            json_data.append(json_element)

        json_filename = "{}.json".format(instance.name)
        json_path = os.path.join(stagingdir, json_filename)
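The per-asset transform block the layout extractor writes can be factored into a small helper; a sketch that produces the same JSON layout as above (Euler rotation, per-axis scale) from any Blender object:

def transform_to_json(obj):
    """Serialize an object's local transform the way the layout
    extractor above does."""
    return {
        "translation": {
            "x": obj.location.x, "y": obj.location.y, "z": obj.location.z},
        "rotation": {
            "x": obj.rotation_euler.x,
            "y": obj.rotation_euler.y,
            "z": obj.rotation_euler.z},
        "scale": {
            "x": obj.scale.x, "y": obj.scale.y, "z": obj.scale.z},
    }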
@ -0,0 +1,39 @@
from typing import List

import pyblish.api

import openpype.api
import openpype.hosts.blender.api.action


class ValidateNoColonsInName(pyblish.api.InstancePlugin):
    """There cannot be colons in names.

    Object or bone names cannot include colons. Other software does not
    handle colons correctly.
    """

    order = openpype.api.ValidateContentsOrder
    hosts = ["blender"]
    families = ["model", "rig"]
    version = (0, 1, 0)
    label = "No Colons in names"
    actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance) -> List:
        invalid = []
        for obj in [obj for obj in instance]:
            if ':' in obj.name:
                invalid.append(obj)
            if obj.type == 'ARMATURE':
                for bone in obj.data.bones:
                    if ':' in bone.name:
                        invalid.append(obj)
                        break
        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                f"Objects found with colon in name: {invalid}")
@ -0,0 +1,40 @@
from typing import List

import mathutils

import pyblish.api

import openpype.api
import openpype.hosts.blender.api.action


class ValidateTransformZero(pyblish.api.InstancePlugin):
    """Transforms can't have any values.

    To solve this issue, try freezing the transforms. As long as the
    translation and rotation values are zero and the scale values are
    one, you're all good.
    """

    order = openpype.api.ValidateContentsOrder
    hosts = ["blender"]
    families = ["model"]
    category = "geometry"
    version = (0, 1, 0)
    label = "Transform Zero"
    actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

    _identity = mathutils.Matrix()

    @classmethod
    def get_invalid(cls, instance) -> List:
        invalid = []
        for obj in [obj for obj in instance]:
            if obj.matrix_basis != cls._identity:
                invalid.append(obj)
        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                f"Objects found with non-zero transforms: {invalid}")
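`mathutils.Matrix()` constructs a 4x4 identity, and the comparison above is exact. When float noise from prior edits is a concern, a tolerance-based variant is a common alternative (a sketch, not what the validator ships):

import mathutils

def is_transform_zero(matrix_basis, tolerance=1e-6):
    """Return True if a 4x4 basis matrix is identity within tolerance."""
    identity = mathutils.Matrix()  # 4x4 identity
    return all(
        abs(matrix_basis[i][j] - identity[i][j]) <= tolerance
        for i in range(4)
        for j in range(4))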
@ -1,17 +1,21 @@
import os
import sys
import logging
import contextlib

import hou

from pyblish import api as pyblish

from avalon import api as avalon
from avalon.houdini import pipeline as houdini

import openpype.hosts.houdini
from openpype.hosts.houdini.api import lib

-from openpype.lib import any_outdated
from openpype.lib import (
    any_outdated
)

from .lib import get_asset_fps

log = logging.getLogger("openpype.hosts.houdini")

@ -22,6 +26,7 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


def install():

    pyblish.register_plugin_path(PUBLISH_PATH)

@ -29,19 +34,28 @@ def install():
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)

    log.info("Installing callbacks ... ")
-    avalon.on("init", on_init)
    # avalon.on("init", on_init)
    avalon.before("save", before_save)
    avalon.on("save", on_save)
    avalon.on("open", on_open)
    avalon.on("new", on_new)

    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

    log.info("Setting default family states for loader..")
-    avalon.data["familiesStateToggled"] = ["imagesequence"]
    avalon.data["familiesStateToggled"] = [
        "imagesequence",
        "review"
    ]

    # add houdini vendor packages
    hou_pythonpath = os.path.join(os.path.dirname(HOST_DIR), "vendor")

    sys.path.append(hou_pythonpath)


def on_init(*args):
    houdini.on_houdini_initialize()

    # Set asset FPS for the empty scene directly after launch of Houdini
    # so it initializes into the correct scene FPS
    _set_asset_fps()


def before_save(*args):

@ -59,10 +73,18 @@ def on_save(*args):

def on_open(*args):

    if not hou.isUIAvailable():
        log.debug("Batch mode detected, ignoring `on_open` callbacks..")
        return

    avalon.logger.info("Running callback on open..")

    # Validate FPS after update_task_from_path to
    # ensure it is using correct FPS for the asset
    lib.validate_fps()

    if any_outdated():
-        from ..widgets import popup
        from openpype.widgets import popup

        log.warning("Scene has outdated content.")

@ -70,7 +92,7 @@ def on_open(*args):
        parent = hou.ui.mainQtWindow()
        if parent is None:
            log.info("Skipping outdated content pop-up "
-                     "because Maya window can't be found.")
                     "because Houdini window can't be found.")
        else:

            # Show outdated pop-up

@ -79,15 +101,52 @@ def on_open(*args):
                tool.show(parent=parent)

            dialog = popup.Popup(parent=parent)
-            dialog.setWindowTitle("Maya scene has outdated content")
            dialog.setWindowTitle("Houdini scene has outdated content")
            dialog.setMessage("There are outdated containers in "
-                              "your Maya scene.")
-            dialog.on_show.connect(_on_show_inventory)
                              "your Houdini scene.")
            dialog.on_clicked.connect(_on_show_inventory)
            dialog.show()


def on_new(_):
    """Set project resolution and fps when creating a new file."""
    avalon.logger.info("Running callback on new..")
    _set_asset_fps()


def _set_asset_fps():
    """Set Houdini scene FPS to the default required for the current asset."""

    # Set new scene fps
    fps = get_asset_fps()
    print("Setting scene FPS to %i" % fps)
    lib.set_scene_fps(fps)


def on_pyblish_instance_toggled(instance, new_value, old_value):
    """Toggle saver tool passthrough states on instance toggles."""
    @contextlib.contextmanager
    def main_take(no_update=True):
        """Enter root take during context"""
        original_take = hou.takes.currentTake()
        original_update_mode = hou.updateModeSetting()
        root = hou.takes.rootTake()
        has_changed = False
        try:
            if original_take != root:
                has_changed = True
                if no_update:
                    hou.setUpdateMode(hou.updateMode.Manual)
                hou.takes.setCurrentTake(root)
            yield
        finally:
            if has_changed:
                if no_update:
                    hou.setUpdateMode(original_update_mode)
                hou.takes.setCurrentTake(original_take)

    if not instance.data.get("_allowToggleBypass", True):
        return

    nodes = instance[:]
    if not nodes:

@ -96,8 +155,20 @@ def on_pyblish_instance_toggled(instance, new_value, old_value):
    # Assume instance node is first node
    instance_node = nodes[0]

    if not hasattr(instance_node, "isBypassed"):
        # Likely not a node that can actually be bypassed
        log.debug("Can't bypass node: %s", instance_node.path())
        return

    if instance_node.isBypassed() != (not old_value):
        print("%s old bypass state didn't match old instance state, "
              "updating anyway.." % instance_node.path())

-    instance_node.bypass(not new_value)
    try:
        # Go into the main take, because when in another take, changing
        # the bypass state of a node cannot be done due to it being locked
        # by default.
        with main_take(no_update=True):
            instance_node.bypass(not new_value)
    except hou.PermissionError as exc:
        log.warning("%s - %s", instance_node.path(), exc)
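The `main_take` context manager restores both the current take and the update mode on exit, so callers only wrap the edit that needs the unlocked root take. A hedged usage sketch (assumes the helper were available at module level and that `node` holds a bypassable ROP node in a live Houdini session):

# Root take is active inside the block, so the bypass flag isn't locked.
with main_take(no_update=True):
    node.bypass(True)
# The previous take and update mode are restored here.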
@ -1,14 +1,19 @@
|
|||
import uuid
|
||||
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
|
||||
import hou
|
||||
|
||||
from openpype import lib
|
||||
|
||||
from openpype.api import get_asset
|
||||
from avalon import api, io
|
||||
from avalon.houdini import lib as houdini
|
||||
|
||||
import hou
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_asset_fps():
|
||||
"""Return current asset fps."""
|
||||
return get_asset()["data"].get("fps")
|
||||
|
||||
def set_id(node, unique_id, overwrite=False):
|
||||
|
||||
|
|
@ -171,10 +176,10 @@ def get_output_parameter(node):
|
|||
node_type = node.type().name()
|
||||
if node_type == "geometry":
|
||||
return node.parm("sopoutput")
|
||||
|
||||
elif node_type == "alembic":
|
||||
return node.parm("filename")
|
||||
|
||||
elif node_type == "comp":
|
||||
return node.parm("copoutput")
|
||||
else:
|
||||
raise TypeError("Node type '%s' not supported" % node_type)
|
||||
|
||||
|
|
@ -205,7 +210,7 @@ def validate_fps():
|
|||
|
||||
"""
|
||||
|
||||
fps = lib.get_asset()["data"]["fps"]
|
||||
fps = get_asset_fps()
|
||||
current_fps = hou.fps() # returns float
|
||||
|
||||
if current_fps != fps:
|
||||
|
|
@ -217,18 +222,123 @@ def validate_fps():
|
|||
if parent is None:
|
||||
pass
|
||||
else:
|
||||
dialog = popup.Popup2(parent=parent)
|
||||
dialog = popup.Popup(parent=parent)
|
||||
dialog.setModal(True)
|
||||
dialog.setWindowTitle("Houdini scene not in line with project")
|
||||
dialog.setMessage("The FPS is out of sync, please fix it")
|
||||
dialog.setWindowTitle("Houdini scene does not match project FPS")
|
||||
dialog.setMessage("Scene %i FPS does not match project %i FPS" %
|
||||
(current_fps, fps))
|
||||
dialog.setButtonText("Fix")
|
||||
|
||||
# Set new text for button (add optional argument for the popup?)
|
||||
toggle = dialog.widgets["toggle"]
|
||||
toggle.setEnabled(False)
|
||||
dialog.on_show.connect(lambda: set_scene_fps(fps))
|
||||
# on_show is the Fix button clicked callback
|
||||
dialog.on_clicked.connect(lambda: set_scene_fps(fps))
|
||||
|
||||
dialog.show()
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def create_remote_publish_node(force=True):
    """Function to create a remote publish node in /out

    This is a hacked "Shell" node that does *nothing* except for triggering
    `colorbleed.lib.publish_remote()` as pre-render script.

    All default attributes of the Shell node are hidden to the Artist to
    avoid confusion.

    Additionally some custom attributes are added that can be collected
    by a Collector to set specific settings for the publish, e.g. whether
    to separate the jobs per instance or process in one single job.

    """

    cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"

    existing = hou.node("/out/REMOTE_PUBLISH")
    if existing:
        if force:
            log.warning("Removing existing '/out/REMOTE_PUBLISH' node..")
            existing.destroy()
        else:
            raise RuntimeError("Node already exists /out/REMOTE_PUBLISH. "
                               "Please remove manually or set `force` to "
                               "True.")

    # Create the shell node
    out = hou.node("/out")
    node = out.createNode("shell", node_name="REMOTE_PUBLISH")
    node.moveToGoodPosition()

    # Set color to make it stand out (avalon/pyblish color)
    node.setColor(hou.Color(0.439, 0.709, 0.933))

    # Set the pre-render script
    node.setParms({
        "prerender": cmd,
        "lprerender": "python"  # command language
    })

    # Lock the attributes to ensure artists won't easily mess things up.
    node.parm("prerender").lock(True)
    node.parm("lprerender").lock(True)

    # Lock up the actual shell command
    command_parm = node.parm("command")
    command_parm.set("")
    command_parm.lock(True)
    shellexec_parm = node.parm("shellexec")
    shellexec_parm.set(False)
    shellexec_parm.lock(True)

    # Get the node's parm template group so we can customize it
    template = node.parmTemplateGroup()

    # Hide default tabs
    template.hideFolder("Shell", True)
    template.hideFolder("Scripts", True)

    # Hide default settings
    template.hide("execute", True)
    template.hide("renderdialog", True)
    template.hide("trange", True)
    template.hide("f", True)
    template.hide("take", True)

    # Add custom settings to this node.
    parm_folder = hou.FolderParmTemplate("folder", "Submission Settings")

    # Separate Jobs per Instance
    parm = hou.ToggleParmTemplate(name="separateJobPerInstance",
                                  label="Separate Job per Instance",
                                  default_value=False)
    parm_folder.addParmTemplate(parm)

    # Add our custom Submission Settings folder
    template.append(parm_folder)

    # Apply template back to the node
    node.setParmTemplateGroup(template)


def render_rop(ropnode):
    """Render ROP node utility for Publishing.

    This renders a ROP node with the settings we want during Publishing.
    """
    # Print verbose when in batch mode without UI
    verbose = not hou.isUIAvailable()

    # Render
    try:
        ropnode.render(verbose=verbose,
                       # Allow Deadline to capture completion percentage
                       output_progress=verbose)
    except hou.Error as exc:
        # hou.Error is not inherited from a Python Exception class,
        # so we explicitly capture the Houdini error, otherwise pyblish
        # will remain hanging.
        import traceback
        traceback.print_exc()
        raise RuntimeError("Render failed: {0}".format(exc))
@@ -1,6 +1,26 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
from avalon import houdini
import six

import hou
from openpype.api import PypeCreatorMixin


class OpenPypeCreatorError(Exception):
    pass


class Creator(PypeCreatorMixin, houdini.Creator):
    def process(self):
        try:
            # re-raise as standard Python exception so
            # Avalon can catch it
            instance = super(Creator, self).process()
            self._process(instance)
        except hou.Error as er:
            six.reraise(
                OpenPypeCreatorError,
                OpenPypeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])
255 openpype/hosts/houdini/api/usd.py Normal file
@@ -0,0 +1,255 @@
"""Houdini-specific USD Library functions."""
|
||||
|
||||
import contextlib
|
||||
|
||||
import logging
|
||||
from Qt import QtCore, QtGui
|
||||
from avalon.tools.widgets import AssetWidget
|
||||
from avalon import style
|
||||
|
||||
from pxr import Sdf
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def pick_asset(node):
|
||||
"""Show a user interface to select an Asset in the project
|
||||
|
||||
When double clicking an asset it will set the Asset value in the
|
||||
'asset' parameter.
|
||||
|
||||
"""
|
||||
|
||||
pos = QtGui.QCursor.pos()
|
||||
|
||||
parm = node.parm("asset_name")
|
||||
if not parm:
|
||||
log.error("Node has no 'asset' parameter: %s", node)
|
||||
return
|
||||
|
||||
# Construct the AssetWidget as a frameless popup so it automatically
|
||||
# closes when clicked outside of it.
|
||||
global tool
|
||||
tool = AssetWidget(silo_creatable=False)
|
||||
tool.setContentsMargins(5, 5, 5, 5)
|
||||
tool.setWindowTitle("Pick Asset")
|
||||
tool.setStyleSheet(style.load_stylesheet())
|
||||
tool.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup)
|
||||
tool.refresh()
|
||||
|
||||
# Select the current asset if there is any
|
||||
name = parm.eval()
|
||||
if name:
|
||||
from avalon import io
|
||||
|
||||
db_asset = io.find_one({"name": name, "type": "asset"})
|
||||
if db_asset:
|
||||
silo = db_asset.get("silo")
|
||||
if silo:
|
||||
tool.set_silo(silo)
|
||||
tool.select_assets([name], expand=True)
|
||||
|
||||
# Show cursor (top right of window) near cursor
|
||||
tool.resize(250, 400)
|
||||
tool.move(tool.mapFromGlobal(pos) - QtCore.QPoint(tool.width(), 0))
|
||||
|
||||
def set_parameter_callback(index):
|
||||
name = index.data(tool.model.DocumentRole)["name"]
|
||||
parm.set(name)
|
||||
tool.close()
|
||||
|
||||
tool.view.doubleClicked.connect(set_parameter_callback)
|
||||
tool.show()
|
||||
|
||||
|
||||
def add_usd_output_processor(ropnode, processor):
|
||||
"""Add USD Output Processor to USD Rop node.
|
||||
|
||||
Args:
|
||||
ropnode (hou.RopNode): The USD Rop node.
|
||||
processor (str): The output processor name. This is the basename of
|
||||
the python file that contains the Houdini USD Output Processor.
|
||||
|
||||
"""
|
||||
|
||||
import loputils
|
||||
|
||||
loputils.handleOutputProcessorAdd(
|
||||
{
|
||||
"node": ropnode,
|
||||
"parm": ropnode.parm("outputprocessors"),
|
||||
"script_value": processor,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def remove_usd_output_processor(ropnode, processor):
|
||||
"""Removes USD Output Processor from USD Rop node.
|
||||
|
||||
Args:
|
||||
ropnode (hou.RopNode): The USD Rop node.
|
||||
processor (str): The output processor name. This is the basename of
|
||||
the python file that contains the Houdini USD Output Processor.
|
||||
|
||||
"""
|
||||
import loputils
|
||||
|
||||
parm = ropnode.parm(processor + "_remove")
|
||||
if not parm:
|
||||
raise RuntimeError(
|
||||
"Output Processor %s does not "
|
||||
"exist on %s" % (processor, ropnode.name())
|
||||
)
|
||||
|
||||
loputils.handleOutputProcessorRemove({"node": ropnode, "parm": parm})
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def outputprocessors(ropnode, processors=tuple(), disable_all_others=True):
|
||||
"""Context manager to temporarily add Output Processors to USD ROP node.
|
||||
|
||||
Args:
|
||||
ropnode (hou.RopNode): The USD Rop node.
|
||||
processors (tuple or list): The processors to add.
|
||||
disable_all_others (bool, Optional): Whether to disable all
|
||||
output processors currently on the ROP node that are not in the
|
||||
`processors` list passed to this function.
|
||||
|
||||
"""
|
||||
# TODO: Add support for forcing the correct Order of the processors
|
||||
|
||||
original = []
|
||||
prefix = "enableoutputprocessor_"
|
||||
processor_parms = ropnode.globParms(prefix + "*")
|
||||
for parm in processor_parms:
|
||||
original.append((parm, parm.eval()))
|
||||
|
||||
if disable_all_others:
|
||||
for parm in processor_parms:
|
||||
parm.set(False)
|
||||
|
||||
added = []
|
||||
for processor in processors:
|
||||
|
||||
parm = ropnode.parm(prefix + processor)
|
||||
if parm:
|
||||
# If processor already exists, just enable it
|
||||
parm.set(True)
|
||||
|
||||
else:
|
||||
# Else add the new processor
|
||||
add_usd_output_processor(ropnode, processor)
|
||||
added.append(processor)
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
|
||||
# Remove newly added processors
|
||||
for processor in added:
|
||||
remove_usd_output_processor(ropnode, processor)
|
||||
|
||||
# Revert to original values
|
||||
for parm, value in original:
|
||||
if parm:
|
||||
parm.set(value)
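# --- Illustrative usage sketch (not part of this file) ----------------------
# Temporarily enable only one output processor while rendering a USD ROP.
# The node path is hypothetical; the processor name "simplerelativepaths" is
# assumed from the "enableoutputprocessor_simplerelativepaths" parameter that
# the USD creator in this changeset references.
#
#     rop = hou.node("/stage/usd_rop1")  # hypothetical node path
#     with outputprocessors(rop, processors=("simplerelativepaths",)):
#         rop.render()
#
# On exit any processor added here is removed and prior states are restored.
# -----------------------------------------------------------------------------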
def get_usd_rop_loppath(node):

    # Get the lop path
    node_type = node.type().name()
    if node_type == "usd":
        return node.parm("loppath").evalAsNode()

    elif node_type in {"usd_rop", "usdrender_rop"}:
        # Inside Solaris e.g. /stage (not in ROP context)
        # When incoming connection is present it takes it directly
        inputs = node.inputs()
        if inputs:
            return inputs[0]
        else:
            return node.parm("loppath").evalAsNode()


def get_layer_save_path(layer):
    """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer.

    Args:
        layer (pxr.Sdf.Layer): The Layer to retrieve the save path data from.

    Returns:
        str or None: Path to save to when data exists.

    """
    hou_layer_info = layer.rootPrims.get("HoudiniLayerInfo")
    if not hou_layer_info:
        return

    save_path = hou_layer_info.customData.get("HoudiniSavePath", None)
    if save_path:
        # Unfortunately this doesn't actually resolve the full absolute path
        return layer.ComputeAbsolutePath(save_path)


def get_referenced_layers(layer):
    """Return SdfLayers for all external references of the current layer

    Args:
        layer (pxr.Sdf.Layer): The Layer to retrieve the references from.

    Returns:
        list: List of pxr.Sdf.Layer that are external references to this layer

    """

    layers = []
    for layer_id in layer.GetExternalReferences():
        layer = Sdf.Layer.Find(layer_id)
        if not layer:
            # A file may not be in memory and is
            # referenced from disk. As such it cannot
            # be found. We will ignore those layers.
            continue

        layers.append(layer)

    return layers


def iter_layer_recursive(layer):
    """Recursively iterate all 'external' referenced layers"""

    layers = get_referenced_layers(layer)
    traversed = set(layers)  # Avoid recursion to itself (if even possible)
    traverse = list(layers)
    for layer in traverse:

        # Include children layers (recursion)
        children_layers = get_referenced_layers(layer)
        children_layers = [x for x in children_layers if x not in traversed]
        traverse.extend(children_layers)
        traversed.update(children_layers)

        yield layer


def get_configured_save_layers(usd_rop):

    lop_node = get_usd_rop_loppath(usd_rop)
    stage = lop_node.stage(apply_viewport_overrides=False)
    if not stage:
        raise RuntimeError(
            "No valid USD stage for ROP node: %s" % usd_rop.path()
        )

    root_layer = stage.GetRootLayer()

    save_layers = []
    for layer in iter_layer_recursive(root_layer):
        save_path = get_layer_save_path(layer)
        if save_path is not None:
            save_layers.append(layer)

    return save_layers
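# --- Illustrative usage sketch (not part of this file) ----------------------
# How the helpers above compose; the node path is hypothetical.
#
#     usd_rop = hou.node("/stage/usd_rop1")
#     for layer in get_configured_save_layers(usd_rop):
#         print(layer.identifier, "->", get_layer_save_path(layer))
# -----------------------------------------------------------------------------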
18 openpype/hosts/houdini/hooks/set_paths.py Normal file
@@ -0,0 +1,18 @@
from openpype.lib import PreLaunchHook
import os


class SetPath(PreLaunchHook):
    """Set current dir to workdir.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = ["houdini"]

    def execute(self):
        workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
        if not workdir:
            self.log.warning("BUG: Workdir is not filled.")
            return

        os.chdir(workdir)
@@ -18,28 +18,29 @@ class CreateAlembicCamera(plugin.Creator):
        # Set node type to create for output
        self.data.update({"node_type": "alembic"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            "filename": "$HIP/pyblish/%s.abc" % self.name,
            "use_sop_path": False,
        }

        if self.nodes:
            node = self.nodes[0]
            path = node.path()

            # Split the node path into the first root and the remainder
            # so we can set the root and objects parameters correctly,
            # e.g. "/obj/cam1/cam" -> root "/obj", objects "cam1/cam".
            _, root, remainder = path.split("/", 2)
            parms.update({"root": "/" + root, "objects": remainder})

        instance.setParms(parms)

        # Lock the Use Sop Path setting so the
        # user doesn't accidentally enable it.
        instance.parm("use_sop_path").lock(True)
        instance.parm("trange").set(1)
44 openpype/hosts/houdini/plugins/create/create_composite.py Normal file
@@ -0,0 +1,44 @@
from openpype.hosts.houdini.api import plugin


class CreateCompositeSequence(plugin.Creator):
    """Composite ROP to Image Sequence"""

    label = "Composite (Image Sequence)"
    family = "imagesequence"
    icon = "gears"

    def __init__(self, *args, **kwargs):
        super(CreateCompositeSequence, self).__init__(*args, **kwargs)

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        # Type of ROP node to create
        self.data.update({"node_type": "comp"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name}

        if self.nodes:
            node = self.nodes[0]
            parms.update({"coppath": node.path()})

        instance.setParms(parms)

        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        for name in to_lock:
            try:
                parm = instance.parm(name)
                parm.lock(True)
            except AttributeError:
                # The parameter does not exist on this node type
                self.log.debug(
                    "missing lock pattern {}".format(name))
@@ -17,21 +17,29 @@ class CreatePointCache(plugin.Creator):
self.data.update({"node_type": "alembic"})
|
||||
|
||||
def process(self):
|
||||
instance = super(CreatePointCache, self).process()
|
||||
def _process(self, instance):
|
||||
"""Creator main entry point.
|
||||
|
||||
parms = {"use_sop_path": True, # Export single node from SOP Path
|
||||
"build_from_path": True, # Direct path of primitive in output
|
||||
"path_attrib": "path", # Pass path attribute for output
|
||||
"prim_to_detail_pattern": "cbId",
|
||||
"format": 2, # Set format to Ogawa
|
||||
"filename": "$HIP/pyblish/%s.abc" % self.name}
|
||||
Args:
|
||||
instance (hou.Node): Created Houdini instance.
|
||||
|
||||
"""
|
||||
parms = {
|
||||
"use_sop_path": True, # Export single node from SOP Path
|
||||
"build_from_path": True, # Direct path of primitive in output
|
||||
"path_attrib": "path", # Pass path attribute for output
|
||||
"prim_to_detail_pattern": "cbId",
|
||||
"format": 2, # Set format to Ogawa
|
||||
"facesets": 0, # No face sets (by default exclude them)
|
||||
"filename": "$HIP/pyblish/%s.abc" % self.name,
|
||||
}
|
||||
|
||||
if self.nodes:
|
||||
node = self.nodes[0]
|
||||
parms.update({"sop_path": node.path()})
|
||||
|
||||
instance.setParms(parms)
|
||||
instance.parm("trange").set(1)
|
||||
|
||||
# Lock any parameters in this list
|
||||
to_lock = ["prim_to_detail_pattern"]
70 openpype/hosts/houdini/plugins/create/create_redshift_rop.py Normal file
@@ -0,0 +1,70 @@
import hou
from openpype.hosts.houdini.api import plugin


class CreateRedshiftROP(plugin.Creator):
    """Redshift ROP"""

    label = "Redshift ROP"
    family = "redshift_rop"
    icon = "magic"
    defaults = ["master"]

    def __init__(self, *args, **kwargs):
        super(CreateRedshiftROP, self).__init__(*args, **kwargs)

        # Clear the family prefix from the subset
        subset = self.data["subset"]
        subset_no_prefix = subset[len(self.family):]
        subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
        self.data["subset"] = subset_no_prefix

        # Add chunk size attribute
        self.data["chunkSize"] = 10

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        self.data.update({"node_type": "Redshift_ROP"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        basename = instance.name()
        instance.setName(basename + "_ROP", unique_name=True)

        # Also create the linked Redshift IPR Rop
        try:
            ipr_rop = self.parent.createNode(
                "Redshift_IPR", node_name=basename + "_IPR"
            )
        except hou.OperationFailed:
            raise Exception("Cannot create Redshift node. Is Redshift "
                            "installed and enabled?")

        # Move it to directly under the Redshift ROP
        ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1))

        # Set the linked rop to the Redshift ROP
        ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance))

        prefix = '${HIP}/render/${HIPNAME}/`chs("subset")`.${AOV}.$F4.exr'
        parms = {
            # Render frame range
            "trange": 1,
            # Redshift ROP settings
            "RS_outputFileNamePrefix": prefix,
            "RS_outputMultilayerMode": 0,  # no multi-layered exr
            "RS_outputBeautyAOVSuffix": "beauty",
        }
        instance.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
47 openpype/hosts/houdini/plugins/create/create_usd.py Normal file
@@ -0,0 +1,47 @@
from openpype.hosts.houdini.api import plugin


class CreateUSD(plugin.Creator):
    """Universal Scene Description"""

    label = "USD (experimental)"
    family = "usd"
    icon = "gears"
    enabled = False

    def __init__(self, *args, **kwargs):
        super(CreateUSD, self).__init__(*args, **kwargs)

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        self.data.update({"node_type": "usd"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            "lopoutput": "$HIP/pyblish/%s.usd" % self.name,
            "enableoutputprocessor_simplerelativepaths": False,
        }

        if self.nodes:
            node = self.nodes[0]
            parms.update({"loppath": node.path()})

        instance.setParms(parms)

        # Lock any parameters in this list
        to_lock = [
            "fileperframe",
            # Lock some Avalon attributes
            "family",
            "id",
        ]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
42 openpype/hosts/houdini/plugins/create/create_usdrender.py Normal file
@@ -0,0 +1,42 @@
import hou
from openpype.hosts.houdini.api import plugin


class CreateUSDRender(plugin.Creator):
    """USD Render ROP in /stage"""

    label = "USD Render (experimental)"
    family = "usdrender"
    icon = "magic"

    def __init__(self, *args, **kwargs):
        super(CreateUSDRender, self).__init__(*args, **kwargs)

        self.parent = hou.node("/stage")

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        self.data.update({"node_type": "usdrender"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            # Render frame range
            "trange": 1
        }
        if self.nodes:
            node = self.nodes[0]
            parms.update({"loppath": node.path()})
        instance.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
@@ -18,11 +18,18 @@ class CreateVDBCache(plugin.Creator):
        # Set node type to create for output
        self.data["node_type"] = "geometry"

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name,
            "initsim": True,
            "trange": 1
        }

        if self.nodes:
            node = self.nodes[0]
86 openpype/hosts/houdini/plugins/load/actions.py Normal file
@@ -0,0 +1,86 @@
"""A module containing generic loader actions that will display in the Loader.
|
||||
|
||||
"""
|
||||
|
||||
from avalon import api
|
||||
|
||||
|
||||
class SetFrameRangeLoader(api.Loader):
|
||||
"""Set Houdini frame range"""
|
||||
|
||||
families = [
|
||||
"animation",
|
||||
"camera",
|
||||
"pointcache",
|
||||
"vdbcache",
|
||||
"usd",
|
||||
]
|
||||
representations = ["abc", "vdb", "usd"]
|
||||
|
||||
label = "Set frame range"
|
||||
order = 11
|
||||
icon = "clock-o"
|
||||
color = "white"
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
|
||||
import hou
|
||||
|
||||
version = context["version"]
|
||||
version_data = version.get("data", {})
|
||||
|
||||
start = version_data.get("startFrame", None)
|
||||
end = version_data.get("endFrame", None)
|
||||
|
||||
if start is None or end is None:
|
||||
print(
|
||||
"Skipping setting frame range because start or "
|
||||
"end frame data is missing.."
|
||||
)
|
||||
return
|
||||
|
||||
hou.playbar.setFrameRange(start, end)
|
||||
hou.playbar.setPlaybackRange(start, end)
|
||||
|
||||
|
||||
class SetFrameRangeWithHandlesLoader(api.Loader):
|
||||
"""Set Maya frame range including pre- and post-handles"""
|
||||
|
||||
families = [
|
||||
"animation",
|
||||
"camera",
|
||||
"pointcache",
|
||||
"vdbcache",
|
||||
"usd",
|
||||
]
|
||||
representations = ["abc", "vdb", "usd"]
|
||||
|
||||
label = "Set frame range (with handles)"
|
||||
order = 12
|
||||
icon = "clock-o"
|
||||
color = "white"
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
|
||||
import hou
|
||||
|
||||
version = context["version"]
|
||||
version_data = version.get("data", {})
|
||||
|
||||
start = version_data.get("startFrame", None)
|
||||
end = version_data.get("endFrame", None)
|
||||
|
||||
if start is None or end is None:
|
||||
print(
|
||||
"Skipping setting frame range because start or "
|
||||
"end frame data is missing.."
|
||||
)
|
||||
return
|
||||
|
||||
# Include handles
|
||||
handles = version_data.get("handles", 0)
|
||||
start -= handles
|
||||
end += handles
|
||||
|
||||
hou.playbar.setFrameRange(start, end)
|
||||
hou.playbar.setPlaybackRange(start, end)
@@ -6,9 +6,7 @@ from avalon.houdini import pipeline, lib
class AbcLoader(api.Loader):
    """Specific loader of Alembic for the avalon.animation family"""

    families = ["model", "animation", "pointcache", "gpuCache"]
    label = "Load Alembic"
    representations = ["abc"]
    order = -10

@@ -68,8 +66,9 @@ class AbcLoader(api.Loader):
        null = container.createNode("null", node_name="OUT".format(name))
        null.setInput(0, normal_node)

        # Ensure display flag is on the Alembic input node and not on the OUT
        # node to optimize "debug" displaying in the viewport.
        alembic.setDisplayFlag(True)

        # Set new position for unpack node else it gets cluttered
        nodes = [container, alembic, unpack, normal_node, null]

@@ -78,18 +77,22 @@ class AbcLoader(api.Loader):

        self[:] = nodes

        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, representation):

        node = container["node"]
        try:
            alembic_node = next(
                n for n in node.children() if n.type().name() == "alembic"
            )
        except StopIteration:
            self.log.error("Could not find node of type `alembic`")
            return
@@ -1,8 +1,79 @@
from avalon import api

from avalon.houdini import pipeline, lib


ARCHIVE_EXPRESSION = ('__import__("_alembic_hom_extensions")'
                      '.alembicGetCameraDict')


def transfer_non_default_values(src, dest, ignore=None):
    """Copy parm values from src to dest.

    Because the Alembic Archive rebuilds the entire node
    hierarchy on triggering "Build Hierarchy" we want to
    preserve any local tweaks made by the user on the camera
    for ease of use. That could be a background image, a
    resolution change or even Redshift camera parameters.

    We try to do so by finding all parms that exist on both
    the source and destination node, including only those where
    both are not at their default value and are visible. We
    exclude those that have the special "alembic archive"
    channel expression and ignore certain parm types.

    """
    import hou

    src.updateParmStates()

    for parm in src.allParms():

        if ignore and parm.name() in ignore:
            continue

        # If destination parm does not exist, ignore..
        dest_parm = dest.parm(parm.name())
        if not dest_parm:
            continue

        # Ignore values that are currently at default
        if parm.isAtDefault() and dest_parm.isAtDefault():
            continue

        if not parm.isVisible():
            # Ignore hidden parameters, assume they
            # are implementation details
            continue

        expression = None
        try:
            expression = parm.expression()
        except hou.OperationFailed:
            # No expression present
            pass

        if expression is not None and ARCHIVE_EXPRESSION in expression:
            # Assume it's part of the automated connections that the
            # Alembic Archive makes on loading of the camera and thus we do
            # not want to transfer the expression
            continue

        # Ignore folders, separators, etc.
        ignore_types = {
            hou.parmTemplateType.Toggle,
            hou.parmTemplateType.Menu,
            hou.parmTemplateType.Button,
            hou.parmTemplateType.FolderSet,
            hou.parmTemplateType.Separator,
            hou.parmTemplateType.Label,
        }
        if parm.parmTemplate().type() in ignore_types:
            continue

        print("Preserving attribute: %s" % parm.name())
        dest_parm.setFromParm(parm)


class CameraLoader(api.Loader):
    """Specific loader of Alembic for the avalon.animation family"""

@@ -30,7 +101,7 @@ class CameraLoader(api.Loader):
        counter = 1
        asset_name = context["asset"]["name"]

        namespace = namespace or asset_name
        formatted = "{}_{}".format(namespace, name) if namespace else name
        node_name = "{0}_{1:03d}".format(formatted, counter)

@@ -59,7 +130,8 @@ class CameraLoader(api.Loader):
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="")

    def update(self, container, representation):

@@ -73,14 +145,40 @@ class CameraLoader(api.Loader):
        node.setParms({"fileName": file_path,
                       "representation": str(representation["_id"])})

        # Store the cam temporarily next to the Alembic Archive
        # so that we can preserve parm values the user set on it
        # after build hierarchy was triggered.
        old_camera = self._get_camera(node)
        temp_camera = old_camera.copyTo(node.parent())

        # Rebuild
        node.parm("buildHierarchy").pressButton()

        # Apply values to the new camera
        new_camera = self._get_camera(node)
        transfer_non_default_values(temp_camera,
                                    new_camera,
                                    # The hidden uniform scale attribute
                                    # gets a default connection to
                                    # "icon_scale" so just skip it completely
                                    ignore={"scale"})

        temp_camera.destroy()

    def remove(self, container):

        node = container["node"]
        node.destroy()

    def _get_camera(self, node):
        import hou
        cameras = node.recursiveGlob("*",
                                     filter=hou.nodeTypeFilter.ObjCamera,
                                     include_subnets=False)

        assert len(cameras) == 1, "Camera instance must have only one camera"
        return cameras[0]

    def create_and_connect(self, node, node_type, name=None):
        """Create a node inside the given node and connect it to the input

@@ -93,27 +191,10 @@ class CameraLoader(api.Loader):
            hou.Node

        """
        if name:
            new_node = node.createNode(node_type, node_name=name)
        else:
            new_node = node.createNode(node_type)

        new_node.moveToGoodPosition()
        return new_node
123 openpype/hosts/houdini/plugins/load/load_image.py Normal file
@@ -0,0 +1,123 @@
import os

from avalon import api
from avalon.houdini import pipeline, lib

import hou


def get_image_avalon_container():
    """The COP2 files must be in a COP2 network.

    So we maintain a single entry point within AVALON_CONTAINERS,
    just for ease of use.

    """

    path = pipeline.AVALON_CONTAINERS
    avalon_container = hou.node(path)
    if not avalon_container:
        # Let's create avalon container secretly
        # but make sure the pipeline still is built the
        # way we anticipate it was built, asserting it.
        assert path == "/obj/AVALON_CONTAINERS"

        parent = hou.node("/obj")
        avalon_container = parent.createNode(
            "subnet", node_name="AVALON_CONTAINERS"
        )

    image_container = hou.node(path + "/IMAGES")
    if not image_container:
        image_container = avalon_container.createNode(
            "cop2net", node_name="IMAGES"
        )
        image_container.moveToGoodPosition()

    return image_container


class ImageLoader(api.Loader):
    """Load image sequences into a COP2 network as file node"""

    families = ["colorbleed.imagesequence"]
    label = "Load Image (COP2)"
    representations = ["*"]
    order = -10

    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)

        # Get the root node
        parent = get_image_avalon_container()

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        node = parent.createNode("file", node_name=node_name)
        node.moveToGoodPosition()

        node.setParms({"filename1": file_path})

        # Imprint it manually
        data = {
            "schema": "avalon-core:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
        }

        # todo: add folder="Avalon"
        lib.imprint(node, data)

        return node

    def update(self, container, representation):

        node = container["node"]

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)

        # Update attributes
        node.setParms(
            {
                "filename1": file_path,
                "representation": str(representation["_id"]),
            }
        )

    def remove(self, container):

        node = container["node"]

        # Let's clean up the IMAGES COP2 network
        # if it ends up being empty and we deleted
        # the last file node. Store the parent
        # before we delete the node.
        parent = node.parent()

        node.destroy()

        if not parent.children():
            parent.destroy()

    def _get_file_sequence(self, root):
        files = sorted(os.listdir(root))

        first_fname = files[0]
        prefix, padding, suffix = first_fname.rsplit(".", 2)
        fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
        return os.path.join(root, fname).replace("\\", "/")
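# --- Illustrative example (not part of this file) ----------------------------
# What _get_file_sequence computes, assuming a hypothetical directory whose
# first file is "render.0001.exr":
#
#     first_fname.rsplit(".", 2)  -> ("render", "0001", "exr")
#     resulting parm value        -> "<root>/render.$F4.exr"
#
# The $F token width follows the zero-padding of the first file on disk.
# ------------------------------------------------------------------------------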
80 openpype/hosts/houdini/plugins/load/load_usd_layer.py Normal file
@@ -0,0 +1,80 @@
from avalon import api
from avalon.houdini import pipeline, lib


class USDSublayerLoader(api.Loader):
    """Sublayer USD file in Solaris"""

    families = [
        "colorbleed.usd",
        "colorbleed.pointcache",
        "colorbleed.animation",
        "colorbleed.camera",
        "usdCamera",
    ]
    label = "Sublayer USD"
    representations = ["usd", "usda", "usdlc", "usdnc", "abc"]
    order = 1

    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        stage = hou.node("/stage")

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create USD sublayer
        container = stage.createNode("sublayer", node_name=node_name)
        container.setParms({"filepath1": file_path})
        container.moveToGoodPosition()

        # Imprint it manually
        data = {
            "schema": "avalon-core:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
        }

        # todo: add folder="Avalon"
        lib.imprint(container, data)

        return container

    def update(self, container, representation):

        node = container["node"]

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")

        # Update attributes
        node.setParms(
            {
                "filepath1": file_path,
                "representation": str(representation["_id"]),
            }
        )

        # Reload files
        node.parm("reload").pressButton()

    def remove(self, container):

        node = container["node"]
        node.destroy()
80 openpype/hosts/houdini/plugins/load/load_usd_reference.py Normal file
@@ -0,0 +1,80 @@
from avalon import api
from avalon.houdini import pipeline, lib


class USDReferenceLoader(api.Loader):
    """Reference USD file in Solaris"""

    families = [
        "colorbleed.usd",
        "colorbleed.pointcache",
        "colorbleed.animation",
        "colorbleed.camera",
        "usdCamera",
    ]
    label = "Reference USD"
    representations = ["usd", "usda", "usdlc", "usdnc", "abc"]
    order = -8

    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        stage = hou.node("/stage")

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create USD reference
        container = stage.createNode("reference", node_name=node_name)
        container.setParms({"filepath1": file_path})
        container.moveToGoodPosition()

        # Imprint it manually
        data = {
            "schema": "avalon-core:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
        }

        # todo: add folder="Avalon"
        lib.imprint(container, data)

        return container

    def update(self, container, representation):

        node = container["node"]

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")

        # Update attributes
        node.setParms(
            {
                "filepath1": file_path,
                "representation": str(representation["_id"]),
            }
        )

        # Reload files
        node.parm("reload").pressButton()

    def remove(self, container):

        node = container["node"]
        node.destroy()
110 openpype/hosts/houdini/plugins/load/load_vdb.py Normal file
@@ -0,0 +1,110 @@
import os
import re
from avalon import api

from avalon.houdini import pipeline


class VdbLoader(api.Loader):
    """Load single VDB files or VDB sequences"""

    families = ["vdbcache"]
    label = "Load VDB"
    representations = ["vdb"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        import hou

        # Get the root node
        obj = hou.node("/obj")

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)

        # Remove the default file node, it only loads static meshes.
        # Houdini 17 has removed the file node from the geo node.
        file_node = container.node("file1")
        if file_node:
            file_node.destroy()

        # Explicitly create a file node
        file_node = container.createNode("file", node_name=node_name)
        file_node.setParms({"file": self.format_path(self.fname)})

        # Set display on last node
        file_node.setDisplayFlag(True)

        nodes = [container, file_node]
        self[:] = nodes

        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def format_path(self, path):
        """Format file path correctly for single vdb or vdb sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)

        # The path is either a single file or sequence in a folder.
        is_single_file = os.path.isfile(path)
        if is_single_file:
            filename = path
        else:
            # The path points to the publish .vdb sequence folder so we
            # find the first file in there that ends with .vdb
            files = sorted(os.listdir(path))
            first = next((x for x in files if x.endswith(".vdb")), None)
            if first is None:
                raise RuntimeError(
                    "Couldn't find first .vdb file of "
                    "sequence in: %s" % path
                )

            # Set <frame>.vdb to $F.vdb
            first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first)

            filename = os.path.join(path, first)

        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")

        return filename
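# --- Illustrative example (not part of this file) ----------------------------
# What format_path returns, assuming a hypothetical publish folder
# "/publish/smoke/v001" containing smoke.0001.vdb, smoke.0002.vdb, ...:
#
#     format_path("/publish/smoke/v001") -> "/publish/smoke/v001/smoke.$F.vdb"
#
# A path to a single existing .vdb file is returned as-is (normalized).
# ------------------------------------------------------------------------------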
    def update(self, container, representation):

        node = container["node"]
        try:
            file_node = next(
                n for n in node.children() if n.type().name() == "file"
            )
        except StopIteration:
            self.log.error("Could not find node of type `file`")
            return

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = self.format_path(file_path)

        # Use the same parm name that load() sets on the file node
        file_node.setParms({"file": file_path})

        # Update attribute
        node.setParms({"representation": str(representation["_id"])})

    def remove(self, container):

        node = container["node"]
        node.destroy()
43 openpype/hosts/houdini/plugins/load/show_usdview.py Normal file
@@ -0,0 +1,43 @@
from avalon import api


class ShowInUsdview(api.Loader):
    """Open USD file in usdview"""

    families = ["colorbleed.usd"]
    label = "Show in usdview"
    representations = ["usd", "usda", "usdlc", "usdnc"]
    order = 10

    icon = "code-fork"
    color = "white"

    def load(self, context, name=None, namespace=None, data=None):

        import os
        import subprocess

        import avalon.lib as lib

        usdview = lib.which("usdview")

        filepath = os.path.normpath(self.fname)
        filepath = filepath.replace("\\", "/")

        if not os.path.exists(filepath):
            self.log.error("File does not exist: %s" % filepath)
            return

        self.log.info("Start houdini variant of usdview...")

        # For now avoid some pipeline environment variables that initialize
        # Avalon in Houdini as it is redundant for usdview and slows boot time
        env = os.environ.copy()
        env.pop("PYTHONPATH", None)
        env.pop("HOUDINI_SCRIPT_PATH", None)
        env.pop("HOUDINI_MENU_PATH", None)

        # Force string to avoid unicode issues
        env = {str(key): str(value) for key, value in env.items()}

        subprocess.Popen([usdview, filepath, "--renderer", "GL"], env=env)
@@ -0,0 +1,38 @@
import pyblish.api


class CollectInstanceActiveState(pyblish.api.InstancePlugin):
    """Collect default active state for instance from its node bypass state.

    This is done at the very end of the CollectorOrder so that any required
    collecting of data iterating over instances (with InstancePlugin) will
    actually collect the data for when the user enables the state in the UI.
    Otherwise potentially required data might have skipped collecting.

    """

    order = pyblish.api.CollectorOrder + 0.299
    families = ["*"]
    hosts = ["houdini"]
    label = "Instance Active State"

    def process(self, instance):

        # Must have node to check for bypass state
        if len(instance) == 0:
            return

        # Check bypass state and reverse
        node = instance[0]
        active = not node.isBypassed()

        # Set instance active state
        instance.data.update(
            {
                "active": active,
                # temporary translation of `active` to `publish` till
                # issue has been resolved:
                # https://github.com/pyblish/pyblish-base/issues/307
                "publish": active,
            }
        )
@@ -9,7 +9,7 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder - 0.5
    label = "Houdini Current File"
    hosts = ["houdini"]

    def process(self, context):
        """Inject the current working file"""

@@ -27,8 +27,10 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
        # could have existed already. We will allow it if the file exists,
        # but show a warning for this edge case to clarify the potential
        # false positive.
        self.log.warning(
            "Current file is 'untitled.hip' and we are "
            "unable to detect whether the current scene is "
            "saved correctly."
        )

        context.data["currentFile"] = filepath
@@ -6,11 +6,11 @@ from openpype.hosts.houdini.api import lib
class CollectFrames(pyblish.api.InstancePlugin):
    """Collect all frames which would be saved from the ROP nodes"""

    order = pyblish.api.CollectorOrder
    label = "Collect Frames"
    families = ["vdbcache", "imagesequence"]

    def process(self, instance):

@@ -19,10 +19,17 @@ class CollectFrames(pyblish.api.InstancePlugin):
        output_parm = lib.get_output_parameter(ropnode)
        output = output_parm.eval()

        _, ext = os.path.splitext(output)
        file_name = os.path.basename(output)
        result = file_name

        # Get the filename pattern match from the output
        # path so we can compute all frames that would
        # come out from rendering the ROP node if there
        # is a frame pattern in the name
        pattern = r"\w+\.(\d+)" + re.escape(ext)
        match = re.match(pattern, file_name)

        start_frame = instance.data.get("frameStart", None)
        end_frame = instance.data.get("frameEnd", None)

@@ -31,10 +38,12 @@ class CollectFrames(pyblish.api.InstancePlugin):
        # Check if frames are bigger than 1 (file collection)
        # override the result
        if end_frame - start_frame > 1:
            result = self.create_file_list(
                match, int(start_frame), int(end_frame)
            )

        # todo: `frames` currently conflicts with "explicit frames" for
        # a custom frame list. So this should be refactored.
        instance.data.update({"frames": result})

    def create_file_list(self, match, start_frame, end_frame):

@@ -50,17 +59,24 @@ class CollectFrames(pyblish.api.InstancePlugin):

        """

        # Get the padding length
        frame = match.group(1)
        padding = len(frame)

        # Get the parts of the filename surrounding the frame number
        # so we can put our own frame numbers in.
        span = match.span(1)
        prefix = match.string[: span[0]]
        suffix = match.string[span[1]:]

        # Generate filenames for all frames
        result = []
        for i in range(start_frame, end_frame + 1):

            # Format frame number by the padding amount
            str_frame = "{number:0{width}d}".format(number=i, width=padding)

            file_name = prefix + str_frame + suffix
            result.append(file_name)

        return result
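# --- Illustrative example (not part of this file) ----------------------------
# What create_file_list produces for a hypothetical output "smoke.0098.vdb"
# matched by the pattern above, with start_frame=98 and end_frame=100:
#
#     ["smoke.0098.vdb", "smoke.0099.vdb", "smoke.0100.vdb"]
# ------------------------------------------------------------------------------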
120 openpype/hosts/houdini/plugins/publish/collect_inputs.py Normal file
@@ -0,0 +1,120 @@
import avalon.api as api
import pyblish.api


def collect_input_containers(nodes):
    """Collect containers that contain any of the nodes in `nodes`.

    This will return any loaded Avalon container that contains at least one of
    the nodes. As such, the Avalon container is an input for it. Or in short,
    there are member nodes of that container.

    Returns:
        list: Input avalon containers

    """

    # Lookup by node ids
    lookup = frozenset(nodes)

    containers = []
    host = api.registered_host()
    for container in host.ls():

        node = container["node"]

        # Usually the loaded containers don't have any complex references
        # and the contained children should be all we need. So we disregard
        # checking for .references() on the nodes.
        members = set(node.allSubChildren())
        members.add(node)  # include the node itself

        # If there's an intersection
        if not lookup.isdisjoint(members):
            containers.append(container)

    return containers


def iter_upstream(node):
    """Yields all upstream inputs for the current node.

    This includes all `node.inputAncestors()` but also traverses through all
    `node.references()` for the node itself and for any of the upstream nodes.
    This method has no max-depth and will collect all upstream inputs.

    Yields:
        hou.Node: The upstream nodes, including references.

    """

    upstream = node.inputAncestors(
        include_ref_inputs=True, follow_subnets=True
    )

    # Initialize process queue with the node's ancestors itself
    queue = list(upstream)
    collected = set(upstream)

    # Traverse upstream references for all nodes and yield them as we
    # process the queue.
    while queue:
        upstream_node = queue.pop()
        yield upstream_node

        # Find its references that are not collected yet.
        references = upstream_node.references()
        references = [n for n in references if n not in collected]

        queue.extend(references)
        collected.update(references)

        # Include the references' ancestors that have not been collected yet.
        for reference in references:
            ancestors = reference.inputAncestors(
                include_ref_inputs=True, follow_subnets=True
            )
            ancestors = [n for n in ancestors if n not in collected]

            queue.extend(ancestors)
            collected.update(ancestors)


class CollectUpstreamInputs(pyblish.api.InstancePlugin):
    """Collect source input containers used for this publish.

    This will include `inputs` data of which loaded publishes were used in the
    generation of this publish. This leaves an upstream trace to what was used
    as input.

    """

    label = "Collect Inputs"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]

    def process(self, instance):
        # We can't get the "inputAncestors" directly from the ROP
        # node, so we find the related output node (set in SOP/COP path)
        # and include that together with its ancestors
        output = instance.data.get("output_node")

        if output is None:
            # If no valid output node is set then ignore it as validation
            # will be checking those cases.
            self.log.debug(
                "No output node found, skipping collecting of inputs.."
            )
            return

        # Collect all upstream parents
        nodes = list(iter_upstream(output))
        nodes.append(output)

        # Collect containers for the given set of nodes
        containers = collect_input_containers(nodes)

        inputs = [c["representation"] for c in containers]
        instance.data["inputs"] = inputs

        self.log.info("Collected inputs: %s" % inputs)
@@ -31,6 +31,13 @@ class CollectInstances(pyblish.api.ContextPlugin):
    def process(self, context):

        nodes = hou.node("/out").children()

        # Include instances in the USD stage only when it exists so it
        # remains backwards compatible with versions before Houdini 18
        stage = hou.node("/stage")
        if stage:
            nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)

        for node in nodes:

            if not node.parm("id"):

@@ -55,6 +62,8 @@ class CollectInstances(pyblish.api.ContextPlugin):

            # Create nice name if the instance has a frame range.
            label = data.get("name", node.name())
            label += " (%s)" % data["asset"]  # include asset in name

            if "frameStart" in data and "frameEnd" in data:
                frames = "[{frameStart} - {frameEnd}]".format(**data)
                label = "{} {}".format(label, frames)
@@ -0,0 +1,152 @@
import hou
|
||||
import pyblish.api
|
||||
from avalon.houdini import lib
|
||||
import openpype.hosts.houdini.api.usd as hou_usdlib
import openpype.lib.usdlib as usdlib


class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):
    """Collect Instances from a ROP Network and its configured layer paths.

    The output nodes of the ROP node will only be published when *any* of the
    layers remain set to 'publish' by the user.

    This works differently from most of our Avalon instances in the pipeline.
    As opposed to storing `pyblish.avalon.instance` as id on the node we store
    `pyblish.avalon.usdlayered`.

    Additionally this instance has no need for storing family, asset, subset
    or name on the nodes. Instead all information is retrieved solely from
    the output filepath, which is an Avalon URI:
        avalon://{asset}/{subset}.{representation}

    Each final ROP node is considered a dependency for any of the configured
    Save Path layers it sets along the way. As such, the instances shown in
    the Pyblish UI are solely the configured layers. The encapsulating USD
    files are generated whenever *any* of the dependencies is published.

    These dependency instances are stored in:
        instance.data["publishDependencies"]

    """

    order = pyblish.api.CollectorOrder - 0.01
    label = "Collect Instances (USD Configured Layers)"
    hosts = ["houdini"]

    def process(self, context):

        stage = hou.node("/stage")
        if not stage:
            # Likely Houdini version <18
            return

        nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
        for node in nodes:

            if not node.parm("id"):
                continue

            if node.evalParm("id") != "pyblish.avalon.usdlayered":
                continue

            has_family = node.evalParm("family")
            assert has_family, "'%s' is missing 'family'" % node.name()

            self.process_node(node, context)

        def sort_by_family(instance):
            """Sort by family"""
            return instance.data.get("families", instance.data.get("family"))

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=sort_by_family)

        return context

    def process_node(self, node, context):

        # Allow a single ROP node or a full ROP network of USD ROP nodes
        # to be processed as a single entry that should "live together" on
        # a publish.
        if node.type().name() == "ropnet":
            # All ROP nodes inside the ROP Network
            ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
        else:
            # A single node
            ropnodes = [node]

        data = lib.read(node)

        # Don't use the explicit "colorbleed.usd.layered" family for
        # publishing; instead use the "colorbleed.usd" family to integrate.
        data["publishFamilies"] = ["colorbleed.usd"]

        # For now group ALL of them into the USD Layer subset group.
        # Allow this subset to be grouped into a USD Layer on creation.
        data["subsetGroup"] = "USD Layer"

        instances = list()
        dependencies = []
        for ropnode in ropnodes:

            # Create a dependency instance per ROP node.
            lopoutput = ropnode.evalParm("lopoutput")
            dependency_save_data = self.get_save_data(lopoutput)
            dependency = context.create_instance(dependency_save_data["name"])
            dependency.append(ropnode)
            dependency.data.update(data)
            dependency.data.update(dependency_save_data)
            dependency.data["family"] = "colorbleed.usd.dependency"
            dependency.data["optional"] = False
            dependencies.append(dependency)

            # Hide the dependency instance from the context
            context.pop()

            # Get all configured layers for this USD ROP node
            # and create a Pyblish instance for each one
            layers = hou_usdlib.get_configured_save_layers(ropnode)
            for layer in layers:
                save_path = hou_usdlib.get_layer_save_path(layer)
                save_data = self.get_save_data(save_path)
                if not save_data:
                    continue
                self.log.info(save_path)

                instance = context.create_instance(save_data["name"])
                instance[:] = [node]

                # Set the instance data
                instance.data.update(data)
                instance.data.update(save_data)
                instance.data["usdLayer"] = layer

                # Don't allow the Pyblish `instanceToggled` callback we have
                # installed to set this node to bypass.
                instance.data["_allowToggleBypass"] = False

                instances.append(instance)

        # Store the collected ROP node dependencies
        self.log.debug("Collected dependencies: %s" % (dependencies,))
        for instance in instances:
            instance.data["publishDependencies"] = dependencies

    def get_save_data(self, save_path):

        # Resolve the Avalon URI
        uri_data = usdlib.parse_avalon_uri(save_path)
        if not uri_data:
            self.log.warning("Non Avalon URI Layer Path: %s" % save_path)
            return {}

        # Collect asset + subset from the URI
        name = "{subset} ({asset})".format(**uri_data)
        fname = "{asset}_{subset}.{ext}".format(**uri_data)

        data = dict(uri_data)
        data["usdSavePath"] = save_path
        data["usdFilename"] = fname
        data["name"] = name
        return data
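The Avalon URI convention above is compact enough to sketch: asset and subset come from the path, the representation from the extension. A minimal, hypothetical parser shown only to illustrate the URI shape (the real one is `usdlib.parse_avalon_uri` in `openpype.lib.usdlib` and may differ in detail):

import re

# Hypothetical stand-in for usdlib.parse_avalon_uri(), illustrating the
# avalon://{asset}/{subset}.{representation} convention used above.
_AVALON_URI = re.compile(
    r"^avalon://(?P<asset>[^/]+)/(?P<subset>[^.]+)\.(?P<ext>\w+)$"
)

def parse_avalon_uri(uri):
    """Return a dict with asset, subset and ext, or None for other paths."""
    match = _AVALON_URI.match(uri)
    if match:
        return match.groupdict()

print(parse_avalon_uri("avalon://hero/usdModel.usd"))
# {'asset': 'hero', 'subset': 'usdModel', 'ext': 'usd'}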
@@ -2,13 +2,20 @@ import pyblish.api


class CollectOutputSOPPath(pyblish.api.InstancePlugin):
    """Collect the out node's SOP Path value."""
    """Collect the out node's SOP/COP Path value."""

    order = pyblish.api.CollectorOrder
    families = ["pointcache",
                "vdbcache"]
    families = [
        "pointcache",
        "camera",
        "vdbcache",
        "imagesequence",
        "usd",
        "usdrender",
    ]

    hosts = ["houdini"]
    label = "Collect Output SOP Path"
    label = "Collect Output Node Path"

    def process(self, instance):
@@ -17,12 +24,44 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
        node = instance[0]

        # Get sop path
        if node.type().name() == "alembic":
            sop_path_parm = "sop_path"
        node_type = node.type().name()
        if node_type == "geometry":
            out_node = node.parm("soppath").evalAsNode()

        elif node_type == "alembic":

            # Alembic can switch between using a SOP Path or an object
            if node.parm("use_sop_path").eval():
                out_node = node.parm("sop_path").evalAsNode()
            else:
                root = node.parm("root").eval()
                objects = node.parm("objects").eval()
                path = root + "/" + objects
                out_node = hou.node(path)

        elif node_type == "comp":
            out_node = node.parm("coppath").evalAsNode()

        elif node_type == "usd" or node_type == "usdrender":
            out_node = node.parm("loppath").evalAsNode()

        elif node_type == "usd_rop" or node_type == "usdrender_rop":
            # Inside Solaris, e.g. /stage (not in ROP context).
            # When an incoming connection is present it is taken directly.
            inputs = node.inputs()
            if inputs:
                out_node = inputs[0]
            else:
                out_node = node.parm("loppath").evalAsNode()

        else:
            sop_path_parm = "soppath"
            raise ValueError(
                "ROP node type '%s' is not supported." % node_type
            )

        sop_path = node.parm(sop_path_parm).eval()
        out_node = hou.node(sop_path)
        if not out_node:
            self.log.warning("No output node collected.")
            return

        self.log.debug("Output node: %s" % out_node.path())
        instance.data["output_node"] = out_node
135 openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py Normal file

@@ -0,0 +1,135 @@
import re
import os

import hou
import pyblish.api


def get_top_referenced_parm(parm):

    processed = set()  # disallow infinite loop
    while True:
        if parm.path() in processed:
            raise RuntimeError("Parameter references result in a cycle.")

        processed.add(parm.path())

        ref = parm.getReferencedParm()
        if ref.path() == parm.path():
            # It returns itself when it doesn't reference
            # another parameter
            return ref
        else:
            parm = ref


def evalParmNoFrame(node, parm, pad_character="#"):

    parameter = node.parm(parm)
    assert parameter, "Parameter does not exist: %s.%s" % (node, parm)

    # If the parameter has a parameter reference, then get that
    # parameter instead as otherwise `unexpandedString()` fails.
    parameter = get_top_referenced_parm(parameter)

    # Substitute out the frame numbering with padded characters
    try:
        raw = parameter.unexpandedString()
    except hou.Error as exc:
        print("Failed: %s" % parameter)
        raise RuntimeError(exc)

    def replace(match):
        padding = 1
        n = match.group(2)
        if n and int(n):
            padding = int(n)
        return pad_character * padding

    expression = re.sub(r"(\$F([0-9]*))", replace, raw)

    with hou.ScriptEvalContext(parameter):
        return hou.expandStringAtFrame(expression, 0)


class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
    """Collect Redshift ROP Render Products.

    Collects the instance.data["files"] for the render products.

    Provides:
        instance -> files

    """

    label = "Redshift ROP Render Products"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]
    families = ["redshift_rop"]

    def process(self, instance):

        rop = instance[0]

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix")
        beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix")
        render_products = []

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=beauty_suffix
        )
        render_products.append(beauty_product)

        num_aovs = rop.evalParm("RS_aov")
        for index in range(num_aovs):
            i = index + 1

            # Skip disabled AOVs
            if not rop.evalParm("RS_aovEnable_%s" % i):
                continue

            aov_suffix = rop.evalParm("RS_aovSuffix_%s" % i)
            aov_prefix = evalParmNoFrame(rop, "RS_aovCustomPrefix_%s" % i)
            if not aov_prefix:
                aov_prefix = default_prefix

            aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
            render_products.append(aov_product)

        for product in render_products:
            self.log.debug("Found render product: %s" % product)

        filenames = list(render_products)
        instance.data["files"] = filenames

    def get_render_product_name(self, prefix, suffix):
        """Return the output filename using the AOV prefix and suffix"""

        # When the AOV is explicitly defined in the prefix we just swap it
        # out directly with the AOV suffix to embed it.
        # Note: ${AOV} seems to be evaluated in the parameter as %AOV%
        has_aov_in_prefix = "%AOV%" in prefix
        if has_aov_in_prefix:
            # It seems that when certain separator characters are present
            # before the %AOV% token, Redshift silently removes them when
            # there is no suffix for the current product, for example:
            #     foo_%AOV% -> foo.exr
            pattern = "%AOV%" if suffix else "[._-]?%AOV%"
            product_name = re.sub(pattern, suffix, prefix, flags=re.IGNORECASE)
        else:
            if suffix:
                # Add ".{suffix}" before the extension
                prefix_base, ext = os.path.splitext(prefix)
                product_name = prefix_base + "." + suffix + ext
            else:
                product_name = prefix

        return product_name
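To make the naming rule concrete, here is a standalone copy of the logic with a few invented prefixes and suffixes (illustrative values only, not taken from a real Redshift scene):

import os
import re

def render_product_name(prefix, suffix):
    """Standalone copy of the naming rule above, for illustration only."""
    if "%AOV%" in prefix:
        # With no suffix, also strip a separator preceding the token,
        # mirroring the Redshift behaviour described in the comments above.
        pattern = "%AOV%" if suffix else "[._-]?%AOV%"
        return re.sub(pattern, suffix, prefix, flags=re.IGNORECASE)
    if suffix:
        # Add ".{suffix}" before the extension
        base, ext = os.path.splitext(prefix)
        return base + "." + suffix + ext
    return prefix

assert render_product_name("shot_%AOV%.exr", "diffuse") == "shot_diffuse.exr"
assert render_product_name("shot_%AOV%.exr", "") == "shot.exr"
assert render_product_name("shot.exr", "specular") == "shot.specular.exr"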
@@ -0,0 +1,30 @@
import pyblish.api
import openpype.api

import hou
from avalon.houdini import lib


class CollectRemotePublishSettings(pyblish.api.ContextPlugin):
    """Collect custom settings of the Remote Publish node."""

    order = pyblish.api.CollectorOrder
    families = ["*"]
    hosts = ["houdini"]
    targets = ["deadline"]
    label = "Remote Publish Submission Settings"
    actions = [openpype.api.RepairAction]

    def process(self, context):

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            return

        attributes = lib.read(node)

        # Debug the settings we have collected
        for key, value in sorted(attributes.items()):
            self.log.debug("Collected %s: %s" % (key, value))

        context.data.update(attributes)
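`lib.read(node)` gathers the node's publish settings into a plain dict that is then merged into the context. A hypothetical sketch of that behaviour (the real helper lives in `avalon.houdini.lib` and may differ in detail):

def read(node):
    # Hypothetical sketch: collect the node's spare (user-defined)
    # parameter values into a plain dict keyed by parameter name.
    return {parm.name(): parm.eval() for parm in node.spareParms()}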
@@ -0,0 +1,133 @@
import re
import os

import hou
import pxr.UsdRender

import pyblish.api


def get_var_changed(variable=None):
    """Return changed variables and the operators that use them.

    Note: the `varchange` hscript help states that it forces a recook of
        the nodes that use Variables. That was tested in Houdini 18.0.391.

    Args:
        variable (str, Optional): A specific variable to query the operators
            for. When None is provided it will return all variables that have
            had recent changes and require a recook. Defaults to None.

    Returns:
        dict: Variable that changed with the operators that use it.

    """
    cmd = "varchange -V"
    if variable:
        cmd += " {0}".format(variable)
    output, _ = hou.hscript(cmd)

    changed = {}
    for line in output.split("Variable: "):
        if not line.strip():
            continue

        split = line.split()
        var = split[0]
        operators = split[1:]
        changed[var] = operators

    return changed


class CollectRenderProducts(pyblish.api.InstancePlugin):
    """Collect USD Render Products."""

    label = "Collect Render Products"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]
    families = ["usdrender"]

    def process(self, instance):

        node = instance.data.get("output_node")
        if not node:
            rop_path = instance[0].path()
            raise RuntimeError(
                "No output node found. Make sure to connect an "
                "input to the USD ROP: %s" % rop_path
            )

        # Work around a Houdini 18.0.391 bug where $HIPNAME doesn't
        # automatically update after scene save.
        if hou.applicationVersion() == (18, 0, 391):
            self.log.debug(
                "Checking for recook to work around $HIPNAME refresh bug..."
            )
            changed = get_var_changed("HIPNAME").get("HIPNAME")
            if changed:
                self.log.debug("Recooking for $HIPNAME refresh bug...")
                for operator in changed:
                    hou.node(operator).cook(force=True)

                # Make sure to recook any 'cache' nodes in the history chain
                chain = [node]
                chain.extend(node.inputAncestors())
                for input_node in chain:
                    if input_node.type().name() == "cache":
                        input_node.cook(force=True)

        stage = node.stage()

        filenames = []
        for prim in stage.Traverse():

            if not prim.IsA(pxr.UsdRender.Product):
                continue

            # Get Render Product Name
            product = pxr.UsdRender.Product(prim)

            # We force taking it from any random time sample as opposed to
            # "default" that the USD API falls back to, since that won't
            # return time-sampled values if they were set per time sample.
            name = product.GetProductNameAttr().Get(time=0)
            dirname = os.path.dirname(name)
            basename = os.path.basename(name)

            dollarf_regex = r"(\$F([0-9]?))"
            frame_regex = r"^(.+\.)([0-9]+)(\.[a-zA-Z]+)$"
            if re.match(dollarf_regex, basename):
                # TODO: Confirm this is actually allowed in USD stages / husk
                # Substitute $F
                def replace(match):
                    """Replace $F4 with padded #."""
                    padding = int(match.group(2)) if match.group(2) else 1
                    return "#" * padding

                filename_base = re.sub(dollarf_regex, replace, basename)
                filename = os.path.join(dirname, filename_base)
            else:
                # Substitute basename.0001.ext
                def replace(match):
                    prefix, frame, ext = match.groups()
                    padding = "#" * len(frame)
                    return prefix + padding + ext

                filename_base = re.sub(frame_regex, replace, basename)
                filename = os.path.join(dirname, filename_base)

            filename = filename.replace("\\", "/")

            assert "#" in filename, (
                "Couldn't resolve render product name "
                "with frame number: %s" % name
            )

            filenames.append(filename)

            prim_path = str(prim.GetPath())
            self.log.info("Collected %s name: %s" % (prim_path, filename))

        # Filenames for Deadline
        instance.data["files"] = filenames
110 openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py Normal file

@@ -0,0 +1,110 @@
import pyblish.api

from avalon import io
import openpype.lib.usdlib as usdlib


class CollectUsdBootstrap(pyblish.api.InstancePlugin):
    """Collect special Asset/Shot bootstrap instances if those are needed.

    Some specific subsets are intended to be part of the default structure
    of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset
    we layer a Model and Shade USD file over each other and expose that in
    an Asset USD file, ready to use.

    On the first publish of any of the components of an Asset or Shot the
    missing pieces are bootstrapped and generated in the pipeline too. This
    means that on the very first publish of your model the Asset USD file
    will exist too.

    """

    order = pyblish.api.CollectorOrder + 0.35
    label = "Collect USD Bootstrap"
    hosts = ["houdini"]
    families = ["usd", "usd.layered"]

    def process(self, instance):

        # Detect whether the current subset is a subset in a pipeline
        def get_bootstrap(instance):
            instance_subset = instance.data["subset"]
            for name, layers in usdlib.PIPELINE.items():
                if instance_subset in set(layers):
                    return name  # e.g. "asset"
            return None

        bootstrap = get_bootstrap(instance)
        if bootstrap:
            self.add_bootstrap(instance, bootstrap)

        # Check if any of the dependencies requires a bootstrap
        for dependency in instance.data.get("publishDependencies", list()):
            bootstrap = get_bootstrap(dependency)
            if bootstrap:
                self.add_bootstrap(dependency, bootstrap)

    def add_bootstrap(self, instance, bootstrap):

        self.log.debug("Add bootstrap for: %s" % bootstrap)

        asset = io.find_one({"name": instance.data["asset"], "type": "asset"})
        assert asset, "Asset must exist: %s" % asset

        # Check which are not about to be created and don't exist yet
        required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap)

        require_all_layers = instance.data.get("requireAllLayers", False)
        if require_all_layers:
            # USD files load fine in usdview and Houdini even when layered or
            # referenced files do not exist. So by default we don't require
            # the layers to exist.
            layers = usdlib.PIPELINE.get(bootstrap)
            if layers:
                required += list(layers)

        self.log.debug("Checking required bootstrap: %s" % required)
        for subset in required:
            if self._subset_exists(instance, subset, asset):
                continue

            self.log.debug(
                "Creating {0} USD bootstrap: {1} {2}".format(
                    bootstrap, asset["name"], subset
                )
            )

            new = instance.context.create_instance(subset)
            new.data["subset"] = subset
            new.data["label"] = "{0} ({1})".format(subset, asset["name"])
            new.data["family"] = "usd.bootstrap"
            new.data["comment"] = "Automated bootstrap USD file."
            new.data["publishFamilies"] = ["usd"]

            # Do not allow the user to toggle this instance
            new.data["optional"] = False

            # Copy some data from the instance for which we bootstrap
            for key in ["asset"]:
                new.data[key] = instance.data[key]

    def _subset_exists(self, instance, subset, asset):
        """Return whether subset exists in current context or in database."""
        # Allow it to be created during this publish session
        context = instance.context
        for inst in context:
            if (
                inst.data["subset"] == subset
                and inst.data["asset"] == asset["name"]
            ):
                return True

        # Or, if they already exist in the database we can
        # skip them too.
        return bool(
            io.find_one(
                {"name": subset, "type": "subset", "parent": asset["_id"]}
            )
        )
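The bootstrap lookup assumes `usdlib.PIPELINE` maps a pipeline name to the subsets that compose it. A hypothetical shape of that mapping, shown only to illustrate the lookup (the real contents are defined in `openpype.lib.usdlib`):

# Hypothetical illustration of the PIPELINE mapping consulted above.
PIPELINE = {
    "asset": ["usdModel", "usdShade"],
    "shot": ["usdSetdress", "usdCamera"],
}

def get_bootstrap_name(subset):
    """Return the pipeline ("asset"/"shot") a subset belongs to, if any."""
    for name, layers in PIPELINE.items():
        if subset in set(layers):
            return name

print(get_bootstrap_name("usdModel"))  # asset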
61 openpype/hosts/houdini/plugins/publish/collect_usd_layers.py Normal file

@@ -0,0 +1,61 @@
import os

import pyblish.api
import openpype.hosts.houdini.api.usd as usdlib


class CollectUsdLayers(pyblish.api.InstancePlugin):
    """Collect the USD Layers that have configured save paths."""

    order = pyblish.api.CollectorOrder + 0.35
    label = "Collect USD Layers"
    hosts = ["houdini"]
    families = ["usd"]

    def process(self, instance):

        output = instance.data.get("output_node")
        if not output:
            self.log.debug("No output node found..")
            return

        rop_node = instance[0]

        save_layers = []
        for layer in usdlib.get_configured_save_layers(rop_node):

            info = layer.rootPrims.get("HoudiniLayerInfo")
            save_path = info.customData.get("HoudiniSavePath")
            creator = info.customData.get("HoudiniCreatorNode")

            self.log.debug("Found configured save path: "
                           "%s -> %s" % (layer, save_path))

            # Log the node that configured this save path
            if creator:
                self.log.debug("Created by: %s" % creator)

            save_layers.append((layer, save_path))

        # Store on the instance
        instance.data["usdConfiguredSavePaths"] = save_layers

        # Create configured layer instances so the user can disable updating
        # specific configured layers for publishing.
        context = instance.context
        for layer, save_path in save_layers:
            name = os.path.basename(save_path)
            label = "{0} -> {1}".format(instance.data["name"], name)
            layer_inst = context.create_instance(name)

            family = "colorbleed.usdlayer"
            layer_inst.data["family"] = family
            layer_inst.data["families"] = [family]
            layer_inst.data["subset"] = "__stub__"
            layer_inst.data["label"] = label
            layer_inst.data["asset"] = instance.data["asset"]
            layer_inst.append(instance[0])  # include the same USD ROP
            layer_inst.append((layer, save_path))  # include layer data

            # Allow this subset to be grouped into a USD Layer on creation
            layer_inst.data["subsetGroup"] = "USD Layer"
@@ -3,7 +3,7 @@ import hou


class CollectWorksceneFPS(pyblish.api.ContextPlugin):
    """Get the FPS of the work scene"""
    """Get the FPS of the work scene."""

    label = "Workscene FPS"
    order = pyblish.api.CollectorOrder
@@ -2,6 +2,7 @@ import os

import pyblish.api
import openpype.api
from openpype.hosts.houdini.api.lib import render_rop


class ExtractAlembic(openpype.api.Extractor):
@@ -13,29 +14,20 @@ class ExtractAlembic(openpype.api.Extractor):

    def process(self, instance):

        import hou

        ropnode = instance[0]

        # Get the filename from the filename parameter
        output = ropnode.evalParm("filename")
        staging_dir = os.path.dirname(output)
        # instance.data["stagingDir"] = staging_dir
        instance.data["stagingDir"] = staging_dir

        file_name = os.path.basename(output)

        # We run the render
        self.log.info("Writing alembic '%s' to '%s'" % (file_name,
                                                        staging_dir))
        try:
            ropnode.render()
        except hou.Error as exc:
            # The hou.Error is not inherited from a Python Exception class,
            # so we explicitly capture the Houdini error, otherwise pyblish
            # will remain hanging.
            import traceback
            traceback.print_exc()
            raise RuntimeError("Render failed: {0}".format(exc))

        render_rop(ropnode)

        if "representations" not in instance.data:
            instance.data["representations"] = []
35 openpype/hosts/houdini/plugins/publish/extract_composite.py Normal file

@@ -0,0 +1,35 @@
import os

import pyblish.api
import openpype.api

from openpype.hosts.houdini.api.lib import render_rop


class ExtractComposite(openpype.api.Extractor):

    order = pyblish.api.ExtractorOrder
    label = "Extract Composite (Image Sequence)"
    hosts = ["houdini"]
    families = ["imagesequence"]

    def process(self, instance):

        ropnode = instance[0]

        # Get the filename from the copoutput parameter.
        # `.evalParm(parameter)` will make sure all tokens are resolved.
        output = ropnode.evalParm("copoutput")
        staging_dir = os.path.dirname(output)
        instance.data["stagingDir"] = staging_dir
        file_name = os.path.basename(output)

        self.log.info("Writing comp '%s' to '%s'" % (file_name, staging_dir))

        render_rop(ropnode)

        if "files" not in instance.data:
            instance.data["files"] = []

        frames = instance.data["frames"]
        instance.data["files"].append(frames)
42 openpype/hosts/houdini/plugins/publish/extract_usd.py Normal file

@@ -0,0 +1,42 @@
import os

import pyblish.api
import openpype.api
from openpype.hosts.houdini.api.lib import render_rop


class ExtractUSD(openpype.api.Extractor):

    order = pyblish.api.ExtractorOrder
    label = "Extract USD"
    hosts = ["houdini"]
    families = ["usd",
                "usdModel",
                "usdSetDress"]

    def process(self, instance):

        ropnode = instance[0]

        # Get the filename from the filename parameter
        output = ropnode.evalParm("lopoutput")
        staging_dir = os.path.dirname(output)
        instance.data["stagingDir"] = staging_dir
        file_name = os.path.basename(output)

        self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir))

        render_rop(ropnode)

        assert os.path.exists(output), "Output does not exist: %s" % output

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': file_name,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)
315 openpype/hosts/houdini/plugins/publish/extract_usd_layered.py Normal file

@@ -0,0 +1,315 @@
import os
import contextlib
import hou
import sys
from collections import deque

import pyblish.api
import openpype.api

import openpype.hosts.houdini.api.usd as hou_usdlib
from openpype.hosts.houdini.api.lib import render_rop


class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks.

    For example:

        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception

    """

    def __init__(self):
        self._exit_callbacks = deque()

    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack

    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""

        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)

        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)

    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature.

        Can suppress exceptions the same way __exit__ methods can.

        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)

        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume it's a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit  # Allow use as a decorator

    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """

        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)

        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback  # Allow use as a decorator

    def enter_context(self, cm):
        """Enters the supplied context manager

        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with
        # statement
        _cm_type = type(cm)
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result

    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)

    def __enter__(self):
        return self

    def __exit__(self, *exc_details):
        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]

        def _fix_exception_context(new_exc, old_exc):
            while 1:
                exc_context = new_exc.__context__
                if exc_context in (None, frame_exc):
                    break
                new_exc = exc_context
            new_exc.__context__ = old_exc

        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    suppressed_exc = True
                    exc_details = (None, None, None)
            except Exception:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                if not self._exit_callbacks:
                    raise
                exc_details = new_exc_details
        return suppressed_exc


@contextlib.contextmanager
def parm_values(overrides):
    """Override parameter values during the context."""

    originals = []
    try:
        for parm, value in overrides:
            originals.append((parm, parm.eval()))
            parm.set(value)
        yield
    finally:
        for parm, value in originals:
            # The parameter might not exist anymore, so first
            # check whether it's still valid
            if hou.parm(parm.path()):
                parm.set(value)


class ExtractUSDLayered(openpype.api.Extractor):

    order = pyblish.api.ExtractorOrder
    label = "Extract Layered USD"
    hosts = ["houdini"]
    families = ["usdLayered", "usdShade"]

    # Force Output Processors so it will always save any file
    # into our unique staging directory with processed Avalon paths
    output_processors = ["avalon_uri_processor", "stagingdir_processor"]

    def process(self, instance):

        self.log.info("Extracting: %s" % instance)

        staging_dir = self.staging_dir(instance)
        fname = instance.data.get("usdFilename")

        # The individual ROP nodes are collected as "publishDependencies"
        dependencies = instance.data["publishDependencies"]
        ropnodes = [dependency[0] for dependency in dependencies]
        assert all(
            node.type().name() in {"usd", "usd_rop"} for node in ropnodes
        )

        # Main ROP node, either a USD ROP or a ROP network with
        # multiple USD ROPs
        node = instance[0]

        # Collect any output dependencies that have not been processed yet
        # during extraction of other instances
        outputs = [fname]
        active_dependencies = [
            dep
            for dep in dependencies
            if dep.data.get("publish", True)
            and not dep.data.get("_isExtracted", False)
        ]
        for dependency in active_dependencies:
            outputs.append(dependency.data["usdFilename"])

        pattern = r"*[/\]{0} {0}"
        save_pattern = " ".join(pattern.format(fname) for fname in outputs)

        # Run a stack of context managers before we start the render to
        # temporarily adjust USD ROP settings for our publish output.
        rop_overrides = {
            # This sets the staging directory on the processor to force our
            # output files to end up in the staging directory.
            "stagingdiroutputprocessor_stagingDir": staging_dir,
            # Force the Avalon URI Output Processor to refactor paths for
            # references, payloads and layers to published paths.
            "avalonurioutputprocessor_use_publish_paths": True,
            # Only write out specific USD files based on our outputs
            "savepattern": save_pattern,
        }
        overrides = list()
        with ExitStack() as stack:

            for ropnode in ropnodes:
                manager = hou_usdlib.outputprocessors(
                    ropnode,
                    processors=self.output_processors,
                    disable_all_others=True,
                )
                stack.enter_context(manager)

                # Some of these must be added after we enter the output
                # processor context manager because those parameters only
                # exist when the Output Processor is added to the ROP node.
                for name, value in rop_overrides.items():
                    parm = ropnode.parm(name)
                    assert parm, "Parm not found: %s.%s" % (
                        ropnode.path(),
                        name,
                    )
                    overrides.append((parm, value))

            stack.enter_context(parm_values(overrides))

            # Render the single ROP node or the full ROP network
            render_rop(node)

        # Assert all output files exist in the staging directory
        for output_fname in outputs:
            path = os.path.join(staging_dir, output_fname)
            assert os.path.exists(path), "Output file must exist: %s" % path

        # Set up the dependencies for publish if they have new content
        # compared to previous publishes
        for dependency in active_dependencies:
            dependency_fname = dependency.data["usdFilename"]

            filepath = os.path.join(staging_dir, dependency_fname)
            similar = self._compare_with_latest_publish(dependency, filepath)
            if similar:
                # Deactivate this dependency
                self.log.debug(
                    "Dependency matches previous publish version,"
                    " deactivating %s for publish" % dependency
                )
                dependency.data["publish"] = False
            else:
                self.log.debug("Extracted dependency: %s" % dependency)
                # This dependency should be published
                dependency.data["files"] = [dependency_fname]
                dependency.data["stagingDir"] = staging_dir
                dependency.data["_isExtracted"] = True

        # Store the created files on the instance
        if "files" not in instance.data:
            instance.data["files"] = []
        instance.data["files"].append(fname)

    def _compare_with_latest_publish(self, dependency, new_file):

        from avalon import api, io
        import filecmp

        _, ext = os.path.splitext(new_file)

        # Compare this dependency with the latest published version
        # to detect whether we should make this into a new publish
        # version. If not, skip it.
        asset = io.find_one(
            {"name": dependency.data["asset"], "type": "asset"}
        )
        subset = io.find_one(
            {
                "name": dependency.data["subset"],
                "type": "subset",
                "parent": asset["_id"],
            }
        )
        if not subset:
            # Subset doesn't exist yet. Definitely a new file
            self.log.debug("No existing subset..")
            return False

        version = io.find_one(
            {"type": "version", "parent": subset["_id"]},
            sort=[("name", -1)]
        )
        if not version:
            self.log.debug("No existing version..")
            return False

        representation = io.find_one(
            {
                "name": ext.lstrip("."),
                "type": "representation",
                "parent": version["_id"],
            }
        )
        if not representation:
            self.log.debug("No existing representation..")
            return False

        old_file = api.get_representation_path(representation)
        if not os.path.exists(old_file):
            return False

        return filecmp.cmp(old_file, new_file)
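A minimal usage sketch of the `parm_values` context manager defined above, assuming a hypothetical ROP node path; the original values are restored when the block exits, even on error:

rop = hou.node("/out/my_usd_rop")  # hypothetical node path
overrides = [(rop.parm("trange"), 0)]  # temporarily render current frame
with parm_values(overrides):
    rop.render()
# the original "trange" value is restored here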
@@ -2,6 +2,7 @@ import os

import pyblish.api
import openpype.api
from openpype.hosts.houdini.api.lib import render_rop


class ExtractVDBCache(openpype.api.Extractor):
@@ -13,8 +14,6 @@ class ExtractVDBCache(openpype.api.Extractor):

    def process(self, instance):

        import hou

        ropnode = instance[0]

        # Get the filename from the filename parameter
@@ -25,15 +24,8 @@ class ExtractVDBCache(openpype.api.Extractor):
        file_name = os.path.basename(sop_output)

        self.log.info("Writing VDB '%s' to '%s'" % (file_name, staging_dir))
        try:
            ropnode.render()
        except hou.Error as exc:
            # The hou.Error is not inherited from a Python Exception class,
            # so we explicitly capture the Houdini error, otherwise pyblish
            # will remain hanging.
            import traceback
            traceback.print_exc()
            raise RuntimeError("Render failed: {0}".format(exc))

        render_rop(ropnode)

        output = instance.data["frames"]
@@ -41,9 +33,9 @@ class ExtractVDBCache(openpype.api.Extractor):
            instance.data["representations"] = []

        representation = {
            'name': 'mov',
            'ext': 'mov',
            'files': output,
            "name": "vdb",
            "ext": "vdb",
            "files": output,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)
@@ -0,0 +1,51 @@
import pyblish.api
import avalon.api

from openpype.api import version_up
from openpype.action import get_errored_plugins_from_data


class IncrementCurrentFile(pyblish.api.InstancePlugin):
    """Increment the current file.

    Saves the current scene with an increased version number.

    """

    label = "Increment current file"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["houdini"]
    families = ["colorbleed.usdrender", "redshift_rop"]
    targets = ["local"]

    def process(self, instance):

        # This should be a ContextPlugin, but this is a workaround
        # for a bug in pyblish to run once for a family: issue #250
        context = instance.context
        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        errored_plugins = get_errored_plugins_from_data(context)
        if any(
            plugin.__name__ == "HoudiniSubmitPublishDeadline"
            for plugin in errored_plugins
        ):
            raise RuntimeError(
                "Skipping incrementing current file because "
                "submission to Deadline failed."
            )

        # The filename must not have changed since collecting
        host = avalon.api.registered_host()
        current_file = host.current_file()
        assert (
            context.data["currentFile"] == current_file
        ), "Collected filename differs from the current scene name."

        new_filepath = version_up(current_file)
        host.save(new_filepath)
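`version_up` comes from `openpype.api` and bumps the version token in the work-file name. A simplified sketch of the kind of transform it performs (the real implementation handles more naming schemes):

import re

def version_up(filepath):
    """Simplified sketch: increment the first vNNN token in a path."""
    def bump(match):
        version = int(match.group(1)) + 1
        return "v{0:0{1}d}".format(version, len(match.group(1)))
    return re.sub(r"v(\d+)", bump, filepath, count=1)

print(version_up("shots/sh010_v003.hip"))  # shots/sh010_v004.hip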
@@ -0,0 +1,35 @@
import pyblish.api

import hou
from openpype.api import version_up
from openpype.action import get_errored_plugins_from_data


class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin):
    """Increment the current file.

    Saves the current scene with an increased version number.

    """

    label = "Increment current file"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["houdini"]
    targets = ["deadline"]

    def process(self, context):

        errored_plugins = get_errored_plugins_from_data(context)
        if any(
            plugin.__name__ == "HoudiniSubmitPublishDeadline"
            for plugin in errored_plugins
        ):
            raise RuntimeError(
                "Skipping incrementing current file because "
                "submission to Deadline failed."
            )

        current_filepath = context.data["currentFile"]
        new_filepath = version_up(current_filepath)

        hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True)
37 openpype/hosts/houdini/plugins/publish/save_scene.py Normal file

@@ -0,0 +1,37 @@
import pyblish.api
import avalon.api


class SaveCurrentScene(pyblish.api.InstancePlugin):
    """Save the current scene"""

    label = "Save current file"
    order = pyblish.api.IntegratorOrder - 0.49
    hosts = ["houdini"]
    families = ["usdrender",
                "redshift_rop"]
    targets = ["local"]

    def process(self, instance):

        # This should be a ContextPlugin, but this is a workaround
        # for a bug in pyblish to run once for a family: issue #250
        context = instance.context
        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        # The filename must not have changed since collecting
        host = avalon.api.registered_host()
        current_file = host.current_file()
        assert context.data['currentFile'] == current_file, (
            "Collected filename differs from the current scene name."
        )

        if host.has_unsaved_changes():
            self.log.info("Saving current file..")
            host.save_file(current_file)
        else:
            self.log.debug("No unsaved changes, skipping file save..")
@@ -0,0 +1,23 @@
import pyblish.api


class SaveCurrentSceneDeadline(pyblish.api.ContextPlugin):
    """Save the current scene"""

    label = "Save current file"
    order = pyblish.api.IntegratorOrder - 0.49
    hosts = ["houdini"]
    targets = ["deadline"]

    def process(self, context):
        import hou

        assert (
            context.data["currentFile"] == hou.hipFile.path()
        ), "Collected filename differs from the current scene name."

        if hou.hipFile.hasUnsavedChanges():
            self.log.info("Saving current file..")
            hou.hipFile.save(save_to_recent_files=True)
        else:
            self.log.debug("No unsaved changes, skipping file save..")
@@ -3,7 +3,7 @@ import openpype.api


class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output node is of type VDB
    """Validate that the node connected to the output node is of type VDB.

    Regardless of the amount of VDBs created, the output will need to have an
    equal amount of VDBs, points, primitives and vertices

@@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Node connected to the output node is not "
                               "of type VDB!")
            raise RuntimeError(
                "Node connected to the output node is not of type VDB!"
            )

    @classmethod
    def get_invalid(cls, instance):
@@ -0,0 +1,132 @@
import pyblish.api
import openpype.api

from collections import defaultdict


class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
    """Validate Alembic ROP Primitive to Detail attribute is consistent.

    The Alembic ROP crashes Houdini whenever an attribute in the "Primitive
    to Detail" parameter exists on only a part of the primitives that belong
    to the same hierarchy path. Whenever it encounters inconsistent values,
    specifically where some are empty as opposed to others, Houdini crashes.
    (Tested in Houdini 17.5.229)

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Primitive to Detail (Abc)"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Primitives found with inconsistent primitive "
                "to detail attributes. See log."
            )

    @classmethod
    def get_invalid(cls, instance):

        output = instance.data["output_node"]

        rop = instance[0]
        pattern = rop.parm("prim_to_detail_pattern").eval().strip()
        if not pattern:
            cls.log.debug(
                "Alembic ROP has no 'Primitive to Detail' pattern. "
                "Validation is ignored.."
            )
            return

        build_from_path = rop.parm("build_from_path").eval()
        if not build_from_path:
            cls.log.debug(
                "Alembic ROP has 'Build from Path' disabled. "
                "Validation is ignored.."
            )
            return

        path_attr = rop.parm("path_attrib").eval()
        if not path_attr:
            cls.log.error(
                "The Alembic ROP node has no Path Attribute "
                "value set, but 'Build Hierarchy from Attribute' "
                "is enabled."
            )
            return [rop.path()]

        # Let's assume each attribute is explicitly named for now and has no
        # wildcards for Primitive to Detail. This simplifies the check.
        cls.log.debug("Checking Primitive to Detail pattern: %s" % pattern)
        cls.log.debug("Checking with path attribute: %s" % path_attr)

        # Check if the primitive attribute exists
        frame = instance.data.get("startFrame", 0)
        geo = output.geometryAtFrame(frame)

        # If there are no primitives on the start frame then it might be
        # something that is emitted over time. As such we can't actually
        # validate whether the attributes exist, because they won't exist
        # yet. In that case, just warn the user and allow it.
        if len(geo.iterPrims()) == 0:
            cls.log.warning(
                "No primitives found on current frame. Validation"
                " for Primitive to Detail will be skipped."
            )
            return

        attrib = geo.findPrimAttrib(path_attr)
        if not attrib:
            cls.log.info(
                "Geometry Primitives are missing "
                "path attribute: `%s`" % path_attr
            )
            return [output.path()]

        # Ensure at least a single string value is present
        if not attrib.strings():
            cls.log.info(
                "Primitive path attribute has no "
                "string values: %s" % path_attr
            )
            return [output.path()]

        paths = None
        for attr in pattern.split(" "):
            if not attr.strip():
                # Ignore empty values
                continue

            # Check if the primitive attribute exists
            attrib = geo.findPrimAttrib(attr)
            if not attrib:
                # It is allowed to not have the attribute at all
                continue

            # The issue can only happen if at least one string attribute is
            # present. So we ignore cases with no values whatsoever.
            if not attrib.strings():
                continue

            check = defaultdict(set)
            values = geo.primStringAttribValues(attr)
            if paths is None:
                paths = geo.primStringAttribValues(path_attr)

            for path, value in zip(paths, values):
                check[path].add(value)

            for path, values in check.items():
                # Whenever a single path has multiple values for the
                # Primitive to Detail attribute we consider it
                # inconsistent and invalidate the ROP node's content.
                if len(values) > 1:
                    cls.log.warning(
                        "Path has multiple values: %s (path: %s)"
                        % (list(values), path)
                    )
                    return [output.path()]
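The core rule the validator enforces: per hierarchy path, a Primitive to Detail attribute must have exactly one value. A tiny illustration with invented sample data:

from collections import defaultdict

# Invented (path, attribute value) pairs, one per primitive.
pairs = [
    ("/geo/body", "skin"),
    ("/geo/body", ""),      # same path, different value -> inconsistent
    ("/geo/eyes", "iris"),
]

check = defaultdict(set)
for path, value in pairs:
    check[path].add(value)

print([path for path, values in check.items() if len(values) > 1])
# ['/geo/body']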
@@ -0,0 +1,37 @@
import pyblish.api
import openpype.api


class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):
    """Validate Face Sets are disabled for extraction to pointcache.

    When groups are saved as Face Sets with the Alembic these show up
    as shadingEngine connections in Maya - however, with animated groups
    these connections in Maya won't work as expected, they won't update
    per frame. Additionally, it can break shader assignments in some cases
    where it requires to first break this connection to allow a shader to
    be assigned.

    It is allowed to include Face Sets, so only an issue is logged to
    identify that it could introduce issues down the pipeline.

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Alembic ROP Face Sets"

    def process(self, instance):

        rop = instance[0]
        facesets = rop.parm("facesets").eval()

        # 0 = No Face Sets
        # 1 = Save Non-Empty Groups as Face Sets
        # 2 = Save All Groups As Face Sets
        if facesets != 0:
            self.log.warning(
                "Alembic ROP saves 'Face Sets' for Geometry. "
                "Are you sure you want this?"
            )
@@ -1,9 +1,9 @@
import pyblish.api
import openpype.api
import colorbleed.api


class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output is correct
    """Validate that the node connected to the output is correct.

    The connected node cannot be of the following types for Alembic:
        - VDB

@@ -11,7 +11,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    order = colorbleed.api.ValidateContentsOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Input Node (Abc)"

@@ -19,19 +19,35 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Node connected to the output node incorrect")
            raise RuntimeError(
                "Primitive types found that are not supported "
                "for Alembic output."
            )

    @classmethod
    def get_invalid(cls, instance):

        invalid_nodes = ["VDB", "Volume"]
        invalid_prim_types = ["VDB", "Volume"]
        node = instance.data["output_node"]

        prims = node.geometry().prims()
        if not hasattr(node, "geometry"):
            # In the case someone has explicitly set an Object
            # node instead of a SOP node in Geometry context
            # then for now we ignore it - this allows us to also
            # export object transforms.
            cls.log.warning("No geometry output node found, skipping check..")
            return

        for prim in prims:
            prim_type = prim.type().name()
            if prim_type in invalid_nodes:
                cls.log.error("Found a primitive which is of type '%s' !"
                              % prim_type)
                return [instance]
        frame = instance.data.get("startFrame", 0)
        geo = node.geometryAtFrame(frame)

        invalid = False
        for prim_type in invalid_prim_types:
            if geo.countPrimType(prim_type) > 0:
                cls.log.error(
                    "Found a primitive which is of type '%s' !" % prim_type
                )
                invalid = True

        if invalid:
            return [instance]
@@ -29,8 +29,9 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Output settings do not match for '%s'" %
                               instance)
            raise RuntimeError(
                "Output settings do not match for '%s'" % instance
            )

    @classmethod
    def get_invalid(cls, instance):
@@ -18,12 +18,17 @@ class ValidateBypassed(pyblish.api.InstancePlugin):

    def process(self, instance):

        if len(instance) == 0:
            # Ignore instances without any nodes
            # e.g. in-memory bootstrap instances
            return

        invalid = self.get_invalid(instance)
        if invalid:
            rop = invalid[0]
            raise RuntimeError(
                "ROP node %s is set to bypass, publishing cannot continue.." %
                rop.path()
                "ROP node %s is set to bypass, publishing cannot continue.."
                % rop.path()
            )

    @classmethod
@@ -6,9 +6,9 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):
    """Validate Camera ROP settings."""

    order = openpype.api.ValidateContentsOrder
    families = ['camera']
    hosts = ['houdini']
    label = 'Camera ROP'
    families = ["camera"]
    hosts = ["houdini"]
    label = "Camera ROP"

    def process(self, instance):


@@ -16,8 +16,10 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):

        node = instance[0]
        if node.parm("use_sop_path").eval():
            raise RuntimeError("Alembic ROP for Camera export should not be "
                               "set to 'Use Sop Path'. Please disable.")
            raise RuntimeError(
                "Alembic ROP for Camera export should not be "
                "set to 'Use Sop Path'. Please disable."
            )

        # Get the root and objects parameter of the Alembic ROP node
        root = node.parm("root").eval()

@@ -34,8 +36,8 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):
        if not camera:
            raise ValueError("Camera path does not exist: %s" % path)

        if not camera.type().name() == "cam":
            raise ValueError("Object set in Alembic ROP is not a camera: "
                             "%s (type: %s)" % (camera, camera.type().name()))

        if camera.type().name() != "cam":
            raise ValueError(
                "Object set in Alembic ROP is not a camera: "
                "%s (type: %s)" % (camera, camera.type().name())
            )
@@ -0,0 +1,60 @@
import pyblish.api


class ValidateCopOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance COP Output Node.

    This will ensure:
        - The COP Path is set.
        - The COP Path refers to an existing object.
        - The COP Path node is a COP node.

    """

    order = pyblish.api.ValidatorOrder
    families = ["imagesequence"]
    hosts = ["houdini"]
    label = "Validate COP Output Node"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "COP Output node in '%s' does not exist. "
                "Ensure a valid COP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a COP node.
        if not isinstance(output_node, hou.CopNode):
            cls.log.error(
                "Output node %s is not a COP node. "
                "COP Path must point to a COP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Cop2 to avoid potential edge case scenarios, even though
        # the isinstance check above should be stricter than this
        # category check.
        assert output_node.type().category().name() == "Cop2", (
            "Output node %s is not of category Cop2. This is a bug.."
            % output_node.path()
        )
@@ -0,0 +1,59 @@
import os
import pyblish.api

from openpype.hosts.houdini.api import lib


class ValidateFileExtension(pyblish.api.InstancePlugin):
    """Validate the output file extension fits the output family.

    File extensions:
        - Pointcache must be .abc
        - Camera must be .abc
        - VDB must be .vdb

    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "camera", "vdbcache"]
    hosts = ["houdini"]
    label = "Output File Extension"

    family_extensions = {
        "pointcache": ".abc",
        "camera": ".abc",
        "vdbcache": ".vdb",
    }

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "ROP node has incorrect file extension: %s" % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        # Get the ROP node from the instance
        node = instance[0]

        # Create a lookup for the current family in the instance
        families = []
        family = instance.data.get("family", None)
        if family:
            families.append(family)
        families = set(families)

        # Perform the extension check
        output = lib.get_output_parameter(node).eval()
        _, output_extension = os.path.splitext(output)

        for family in families:
            extension = cls.family_extensions.get(family, None)
            if extension is None:
                raise RuntimeError("Unsupported family: %s" % family)

            if output_extension != extension:
                return [node.path()]
|
||||
|
|
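Note: the extension rule table above reduces to a plain dictionary lookup. A standalone, pure-Python sketch (the helper name is mine, not from this changeset):

    import os

    family_extensions = {"pointcache": ".abc", "camera": ".abc", "vdbcache": ".vdb"}

    def is_valid_output(family, output):
        # Compare the configured extension against the output path's extension
        expected = family_extensions[family]
        return os.path.splitext(output)[1] == expected

    assert is_valid_output("vdbcache", "my_vdb_cache.$F4.vdb")
    assert not is_valid_output("camera", "camera.fbx")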
@@ -0,0 +1,51 @@
import pyblish.api

from openpype.hosts.houdini.api import lib


class ValidateFrameToken(pyblish.api.InstancePlugin):
    """Validate if the unexpanded string contains the frame ('$F') token.

    This validator will *only* check the output parameter of the node if
    the Valid Frame Range is not set to 'Render Current Frame'.

    Rules:
        If you render out a frame range it is mandatory to have the
        frame token - '$F4' or similar - to ensure that each frame gets
        written. If this is not the case you will overwrite the same file
        every time a frame is written out.

    Examples:
        Good: 'my_vdb_cache.$F4.vdb'
        Bad: 'my_vdb_cache.vdb'

    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Frame Token"
    families = ["vdbcache"]

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output settings do not match for '%s'" % instance
            )

    @classmethod
    def get_invalid(cls, instance):

        node = instance[0]

        # Check trange parm, 0 means Render Current Frame
        frame_range = node.evalParm("trange")
        if frame_range == 0:
            return []

        output_parm = lib.get_output_parameter(node)
        unexpanded_str = output_parm.unexpandedString()

        if "$F" not in unexpanded_str:
            cls.log.error("No frame token found in '%s'" % node.path())
            return [instance]
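Note: the reason the check reads the *unexpanded* string is that eval() expands $F to the current frame and would hide a missing token. A hedged sketch (assuming a Geometry ROP whose output parm is named "sopoutput"; path and parm name are assumptions here):

    import hou

    parm = hou.node("/out/geometry1").parm("sopoutput")  # hypothetical ROP
    raw = parm.unexpandedString()   # e.g. "$HIP/geo/cache.$F4.vdb"
    if "$F" not in raw:
        print("Missing frame token; every frame would overwrite: %s"
              % parm.eval())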
@@ -0,0 +1,30 @@
import pyblish.api


class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):
    """Validate the Houdini instance runs a Commercial license.

    When extracting USD files from a non-commercial Houdini license, even
    with a Houdini Indie license, the resulting files will get "scrambled"
    with a license protection and get a special .usdnc or .usdlc suffix.

    This currently breaks the Subset/representation pipeline so we disallow
    any publish with those licenses. Only the commercial license is valid.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usd"]
    hosts = ["houdini"]
    label = "Houdini Commercial License"

    def process(self, instance):

        import hou

        license = hou.licenseCategory()
        if license != hou.licenseCategoryType.Commercial:
            raise RuntimeError(
                "USD Publishing requires a full Commercial "
                "license. You are on: %s" % license
            )
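Note: a minimal sketch of querying the license category in a Houdini Python shell; the printed value depends on the local install:

    import hou

    category = hou.licenseCategory()
    print(category)  # e.g. licenseCategoryType.Commercial or .Indie
    if category != hou.licenseCategoryType.Commercial:
        print("Non-commercial: USD output would get the .usdnc/.usdlc protection")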
@@ -6,18 +6,18 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
    """Validate Create Intermediate Directories is enabled on ROP node."""

    order = openpype.api.ValidateContentsOrder
    families = ['pointcache',
                'camera',
                'vdbcache']
    hosts = ['houdini']
    label = 'Create Intermediate Directories Checked'
    families = ["pointcache", "camera", "vdbcache"]
    hosts = ["houdini"]
    label = "Create Intermediate Directories Checked"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found ROP node with Create Intermediate "
                               "Directories turned off: %s" % invalid)
            raise RuntimeError(
                "Found ROP node with Create Intermediate "
                "Directories turned off: %s" % invalid
            )

    @classmethod
    def get_invalid(cls, instance):
65
openpype/hosts/houdini/plugins/publish/validate_no_errors.py
Normal file
@@ -0,0 +1,65 @@
import pyblish.api
import openpype.api
import hou


def cook_in_range(node, start, end):
    current = hou.intFrame()
    if start <= current <= end:
        # Allow cooking current frame since we're in frame range
        node.cook(force=False)
    else:
        node.cook(force=False, frame_range=(start, start))


def get_errors(node):
    """Get cooking errors.

    If the node already has errors, check whether it needs to recook.
    If so, recook first to see if that solves it.

    """
    if node.errors() and node.needsToCook():
        node.cook()

    return node.errors()


class ValidateNoErrors(pyblish.api.InstancePlugin):
    """Validate the Instance has no current cooking errors."""

    order = openpype.api.ValidateContentsOrder
    hosts = ["houdini"]
    label = "Validate no errors"

    def process(self, instance):

        validate_nodes = []

        if len(instance) > 0:
            validate_nodes.append(instance[0])
        output_node = instance.data.get("output_node")
        if output_node:
            validate_nodes.append(output_node)

        for node in validate_nodes:
            self.log.debug("Validating for errors: %s" % node.path())
            errors = get_errors(node)

            if errors:
                # If there are current errors, then try an unforced cook
                # to see whether the error will disappear.
                self.log.debug(
                    "Recooking to revalidate error "
                    "is up to date for: %s" % node.path()
                )
                current_frame = hou.intFrame()
                start = instance.data.get("frameStart", current_frame)
                end = instance.data.get("frameEnd", current_frame)
                cook_in_range(node, start=start, end=end)

            # Check for errors again after the recook
            errors = get_errors(node)
            if errors:
                self.log.error(errors)
                raise RuntimeError("Node has errors: %s" % node.path())
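Note: the cook-then-recheck pattern above, reduced to a single node. A hedged sketch for a Houdini session; "/out/geometry1" is a hypothetical path:

    import hou

    node = hou.node("/out/geometry1")  # hypothetical node
    if node.errors() and node.needsToCook():
        node.cook(force=False)  # an unforced recook may clear stale errors
    if node.errors():
        raise RuntimeError("Node has errors: %s" % node.path())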
@@ -1,50 +0,0 @@
import pyblish.api
import openpype.api


class ValidatOutputNodeExists(pyblish.api.InstancePlugin):
    """Validate if node attribute Create intermediate Directories is turned on

    Rules:
        * The node must have Create intermediate Directories turned on to
          ensure the output file will be created

    """

    order = openpype.api.ValidateContentsOrder
    families = ["*"]
    hosts = ['houdini']
    label = "Output Node Exists"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Could not find output node(s)!")

    @classmethod
    def get_invalid(cls, instance):

        import hou

        result = set()

        node = instance[0]
        if node.type().name() == "alembic":
            soppath_parm = "sop_path"
        else:
            # Fall back to geometry node
            soppath_parm = "soppath"

        sop_path = node.parm(soppath_parm).eval()
        output_node = hou.node(sop_path)

        if output_node is None:
            cls.log.error("Node at '%s' does not exist" % sop_path)
            result.add(node.path())

        # Added cam as this is a legit output type (cameras can't
        if output_node.type().name() not in ["output", "cam"]:
            cls.log.error("SOP Path does not end path at output node")
            result.add(node.path())

        return result
@@ -14,8 +14,7 @@ class ValidateOutputNode(pyblish.api.InstancePlugin):
    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache",
                "vdbcache"]
    families = ["pointcache", "vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node"
@@ -23,8 +22,10 @@ class ValidateOutputNode(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Output node(s) `%s` are incorrect. "
                               "See plug-in log for details." % invalid)
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):
@@ -35,39 +36,42 @@ class ValidateOutputNode(pyblish.api.InstancePlugin):

        if output_node is None:
            node = instance[0]
            cls.log.error("SOP Output node in '%s' does not exist. "
                          "Ensure a valid SOP output path is set."
                          % node.path())
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a Sop node.
        if not isinstance(output_node, hou.SopNode):
            cls.log.error("Output node %s is not a SOP node. "
                          "SOP Path must point to a SOP node, "
                          "instead found category type: %s" % (
                              output_node.path(),
                              output_node.type().category().name()
                          )
                          )
            cls.log.error(
                "Output node %s is not a SOP node. "
                "SOP Path must point to a SOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Sop to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Sop", (
            "Output node %s is not of category Sop. This is a bug.." %
            output_node.path()
            "Output node %s is not of category Sop. This is a bug.."
            % output_node.path()
        )

        # Check if output node has incoming connections
        if not output_node.inputConnections():
            cls.log.error("Output node `%s` has no incoming connections"
                          % output_node.path())
            cls.log.error(
                "Output node `%s` has no incoming connections"
                % output_node.path()
            )
            return [output_node.path()]

        # Ensure the output node has at least Geometry data
        if not output_node.geometry():
            cls.log.error("Output node `%s` has no geometry data."
                          % output_node.path())
            cls.log.error(
                "Output node `%s` has no geometry data." % output_node.path()
            )
            return [output_node.path()]
@@ -19,8 +19,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("See log for details. "
                               "Invalid nodes: {0}".format(invalid))
            raise RuntimeError(
                "See log for details. Invalid nodes: {0}".format(invalid)
            )

    @classmethod
    def get_invalid(cls, instance):
@@ -28,48 +29,68 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
        import hou

        output = instance.data["output_node"]
        prims = output.geometry().prims()

        rop = instance[0]
        build_from_path = rop.parm("build_from_path").eval()
        if not build_from_path:
            cls.log.debug("Alembic ROP has 'Build from Path' disabled. "
                          "Validation is ignored..")
            cls.log.debug(
                "Alembic ROP has 'Build from Path' disabled. "
                "Validation is ignored.."
            )
            return

        path_attr = rop.parm("path_attrib").eval()
        if not path_attr:
            cls.log.error("The Alembic ROP node has no Path Attribute"
                          "value set, but 'Build Hierarchy from Attribute'"
                          "is enabled.")
            cls.log.error(
                "The Alembic ROP node has no Path Attribute"
                "value set, but 'Build Hierarchy from Attribute'"
                "is enabled."
            )
            return [rop.path()]

        cls.log.debug("Checking for attribute: %s" % path_attr)

        missing_attr = []
        invalid_attr = []
        for prim in prims:
            # Check if the primitive attribute exists
        frame = instance.data.get("startFrame", 0)
        geo = output.geometryAtFrame(frame)

            try:
                path = prim.stringAttribValue(path_attr)
            except hou.OperationFailed:
                # Attribute does not exist.
                missing_attr.append(prim)
                continue
        # If there are no primitives on the current frame then we can't
        # check whether the path names are correct. So we'll just issue a
        # warning that the check can't be done consistently and skip
        # validation.
        if len(geo.iterPrims()) == 0:
            cls.log.warning(
                "No primitives found on current frame. Validation"
                " for primitive hierarchy paths will be skipped,"
                " thus can't be validated."
            )
            return

            if not path:
                # Empty path value is invalid.
                invalid_attr.append(prim)
                continue

        if missing_attr:
            cls.log.info("Prims are missing attribute `%s`" % path_attr)

        if invalid_attr:
            cls.log.info("Prims have no value for attribute `%s` "
                         "(%s of %s prims)" % (path_attr,
                                               len(invalid_attr),
                                               len(prims)))

        if missing_attr or invalid_attr:
        # Check if there are any values for the primitives
        attrib = geo.findPrimAttrib(path_attr)
        if not attrib:
            cls.log.info(
                "Geometry Primitives are missing "
                "path attribute: `%s`" % path_attr
            )
            return [output.path()]

        # Ensure at least a single string value is present
        if not attrib.strings():
            cls.log.info(
                "Primitive path attribute has no "
                "string values: %s" % path_attr
            )
            return [output.path()]

        paths = geo.primStringAttribValues(path_attr)
        # Ensure all primitives are set to a valid path
        # Collect all invalid primitive numbers
        invalid_prims = [i for i, path in enumerate(paths) if not path]
        if invalid_prims:
            num_prims = len(geo.iterPrims())  # faster than len(geo.prims())
            cls.log.info(
                "Prims have no value for attribute `%s` "
                "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims)
            )
            return [output.path()]
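Note: the new attribute lookup can be tried directly on a SOP's geometry. A hedged sketch; node path, frame and the "path" attribute name are assumptions for illustration:

    import hou

    sop = hou.node("/obj/geo1/OUT")  # hypothetical SOP output node
    geo = sop.geometryAtFrame(1001)
    attrib = geo.findPrimAttrib("path")
    if attrib is None:
        print("Primitives are missing the path attribute")
    else:
        values = geo.primStringAttribValues("path")
        empty = [i for i, value in enumerate(values) if not value]
        print("%s of %s prims have no path" % (len(empty), len(values)))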
@@ -0,0 +1,43 @@
import pyblish.api
import openpype.api

from openpype.hosts.houdini.api import lib

import hou


class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin):
    """Validate the remote publish out node exists for Deadline to trigger."""

    order = pyblish.api.ValidatorOrder - 0.4
    families = ["*"]
    hosts = ["houdini"]
    targets = ["deadline"]
    label = "Remote Publish ROP node"
    actions = [openpype.api.RepairContextAction]

    def process(self, context):

        cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")

        # We ensure it's a shell node and that it has the pre-render script
        # set correctly. Plus the shell script it will trigger should be
        # completely empty (doing nothing)
        assert node.type().name() == "shell", "Must be shell ROP node"
        assert node.parm("command").eval() == "", "Must have no command"
        assert not node.parm("shellexec").eval(), "Must not execute in shell"
        assert (
            node.parm("prerender").eval() == cmd
        ), "REMOTE_PUBLISH node does not have correct prerender script."
        assert (
            node.parm("lprerender").eval() == "python"
        ), "REMOTE_PUBLISH node prerender script type not set to 'python'"

    @classmethod
    def repair(cls, context):
        """(Re)create the node if it fails to pass validation."""
        lib.create_remote_publish_node(force=True)
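Note: a hedged sketch of creating such a shell ROP by hand and inspecting the parms the validator asserts on. The node name mirrors the one above; the expected pre-render command string would come from the pipeline, not from this sketch:

    import hou

    out = hou.node("/out")
    node = out.node("REMOTE_PUBLISH") or out.createNode(
        "shell", node_name="REMOTE_PUBLISH")
    print(node.parm("command").eval())     # expected: ""
    print(node.parm("shellexec").eval())   # expected: 0
    print(node.parm("prerender").eval())   # expected: the publish command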
@@ -0,0 +1,35 @@
import pyblish.api
import openpype.api

import hou


class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):
    """Validate the remote publish node is *not* bypassed."""

    order = pyblish.api.ValidatorOrder - 0.39
    families = ["*"]
    hosts = ["houdini"]
    targets = ["deadline"]
    label = "Remote Publish ROP enabled"
    actions = [openpype.api.RepairContextAction]

    def process(self, context):

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")

        if node.isBypassed():
            raise RuntimeError("REMOTE_PUBLISH must not be bypassed.")

    @classmethod
    def repair(cls, context):
        """Disable the bypass flag if the node fails to pass validation."""

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")

        cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH")
        node.bypass(False)
@@ -0,0 +1,80 @@
import pyblish.api


class ValidateSopOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance SOP Output Node.

    This will ensure:
        - The SOP Path is set.
        - The SOP Path refers to an existing object.
        - The SOP Path node is a SOP node.
        - The SOP Path node has at least one input connection (has an input)
        - The SOP Path has geometry data.

    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a SOP node.
        if not isinstance(output_node, hou.SopNode):
            cls.log.error(
                "Output node %s is not a SOP node. "
                "SOP Path must point to a SOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Sop to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Sop", (
            "Output node %s is not of category Sop. This is a bug.."
            % output_node.path()
        )

        # Ensure the node is cooked and succeeds to cook so we can correctly
        # check for its geometry data.
        if output_node.needsToCook():
            cls.log.debug("Cooking node: %s" % output_node.path())
            try:
                output_node.cook()
            except hou.Error as exc:
                cls.log.error("Cook failed: %s" % exc)
                cls.log.error(output_node.errors()[0])
                return [output_node.path()]

        # Ensure the output node has at least Geometry data
        if not output_node.geometry():
            cls.log.error(
                "Output node `%s` has no geometry data." % output_node.path()
            )
            return [output_node.path()]
@@ -0,0 +1,50 @@
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib


class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
    """Validate USD loaded paths have no backslashes.

    This is a crucial validation for HUSK USD rendering as Houdini's
    USD Render ROP will fail to write out a .usd file for rendering that
    correctly preserves the backslashes, e.g. it will incorrectly convert a
    '\t' to a TAB character, disallowing HUSK to find those specific files.

    This validation is redundant for usdModel since that flattens the model
    before write. As such it will never have any used layers with a path.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdSetDress", "usdShade", "usd", "usdrender"]
    hosts = ["houdini"]
    label = "USD Layer path backslashes"
    optional = True

    def process(self, instance):

        rop = instance[0]
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for layer in stage.GetUsedLayers():
            references = layer.externalReferences

            for ref in references:

                # Ignore anonymous layers
                if ref.startswith("anon:"):
                    continue

                # If any backslashes in the path consider it invalid
                if "\\" in ref:
                    self.log.error("Found invalid path: %s" % ref)
                    invalid.append(layer)

        if invalid:
            raise RuntimeError(
                "Loaded layers have backslashes. "
                "This is invalid for HUSK USD rendering."
            )
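Note: the backslash rule itself is plain string filtering. A pure-Python sketch with made-up reference strings:

    refs = ["geo/thing.usd", "shots\\seq010\\thing.usd", "anon:0x7f:tmp"]

    invalid = [
        ref for ref in refs
        if not ref.startswith("anon:") and "\\" in ref
    ]
    print(invalid)  # ['shots\\seq010\\thing.usd']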
@@ -0,0 +1,76 @@
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib

from pxr import UsdShade, UsdRender, UsdLux


def fullname(o):
    """Get fully qualified class name"""
    module = o.__module__
    if module is None or module == str.__module__:
        return o.__name__
    return module + "." + o.__name__


class ValidateUsdModel(pyblish.api.InstancePlugin):
    """Validate USD Model.

    Disallow Shaders, Render settings, products and vars and Lux lights.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdModel"]
    hosts = ["houdini"]
    label = "Validate USD Model"
    optional = True

    disallowed = [
        UsdShade.Shader,
        UsdRender.Settings,
        UsdRender.Product,
        UsdRender.Var,
        UsdLux.Light,
    ]

    def process(self, instance):

        rop = instance[0]
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for prim in stage.Traverse():

            for klass in self.disallowed:
                if klass(prim):
                    # Get full class name without pxr. prefix
                    name = fullname(klass).split("pxr.", 1)[-1]
                    path = str(prim.GetPath())
                    self.log.warning("Disallowed %s: %s" % (name, path))

                    invalid.append(prim)

        if invalid:
            prim_paths = sorted([str(prim.GetPath()) for prim in invalid])
            raise RuntimeError("Found invalid primitives: %s" % prim_paths)


class ValidateUsdShade(ValidateUsdModel):
    """Validate usdShade.

    Disallow Render settings, products, vars and Lux lights.

    """

    families = ["usdShade"]
    label = "Validate USD Shade"

    disallowed = [
        UsdRender.Settings,
        UsdRender.Product,
        UsdRender.Var,
        UsdLux.Light,
    ]
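Note: the disallowed-prim scan works on any USD stage, not just one taken from a LOP. A hedged sketch assuming the pxr Python bindings are available; the file path is hypothetical:

    from pxr import Usd, UsdShade

    stage = Usd.Stage.Open("/tmp/model.usda")  # hypothetical file
    invalid = [
        prim for prim in stage.Traverse()
        if UsdShade.Shader(prim)  # schema wrapper is falsy if prim doesn't match
    ]
    print(sorted(str(prim.GetPath()) for prim in invalid))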
@@ -0,0 +1,52 @@
import pyblish.api


class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance USD LOPs Output Node.

    This will ensure:
        - The LOP Path is set.
        - The LOP Path refers to an existing object.
        - The LOP Path node is a LOP node.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usd"]
    hosts = ["houdini"]
    label = "Validate Output Node (USD)"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "USD node '%s' LOP path does not exist. "
                "Ensure a valid LOP path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a LOP node.
        if not isinstance(output_node, hou.LopNode):
            cls.log.error(
                "Output node %s is not a LOP node. "
                "LOP Path must point to a LOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]
@@ -0,0 +1,31 @@
import pyblish.api

import os


class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):
    """Validate USD Render Product names are correctly set absolute paths."""

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate USD Render Product Names"
    optional = True

    def process(self, instance):

        invalid = []
        for filepath in instance.data["files"]:

            if not filepath:
                invalid.append("Detected empty output filepath.")

            if not os.path.isabs(filepath):
                invalid.append(
                    "Output file path is not an absolute path: %s" % filepath
                )

        if invalid:
            for message in invalid:
                self.log.error(message)
            raise RuntimeError("USD Render Paths are invalid.")
@@ -0,0 +1,54 @@
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib


class ValidateUsdSetDress(pyblish.api.InstancePlugin):
    """Validate USD Set Dress.

    Must only have references or payloads. May not generate new mesh or
    flattened meshes.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdSetDress"]
    hosts = ["houdini"]
    label = "Validate USD Set Dress"
    optional = True

    def process(self, instance):

        from pxr import UsdGeom

        rop = instance[0]
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for node in stage.Traverse():

            if UsdGeom.Mesh(node):
                # This solely checks whether there is any USD involved
                # in this Prim's Stack and doesn't accurately tell us
                # whether it was generated locally or not.
                # TODO: More accurately track whether the Prim was created
                #       in the local scene
                stack = node.GetPrimStack()
                for sdf in stack:
                    path = sdf.layer.realPath
                    if path:
                        break
                else:
                    prim_path = node.GetPath()
                    self.log.error(
                        "%s is not referenced geometry." % prim_path
                    )
                    invalid.append(node)

        if invalid:
            raise RuntimeError(
                "SetDress contains local geometry. "
                "This is not allowed, it must be an assembly "
                "of referenced assets."
            )
@@ -0,0 +1,41 @@
import re

import pyblish.api
import openpype.api

from avalon import io


class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
    """Validate the USD model subset that this USD shade subset targets exists."""

    order = openpype.api.ValidateContentsOrder
    hosts = ["houdini"]
    families = ["usdShade"]
    label = "USD Shade model exists"

    def process(self, instance):

        asset = instance.data["asset"]
        subset = instance.data["subset"]

        # Assume shading variation starts after a dot separator
        shade_subset = subset.split(".", 1)[0]
        model_subset = re.sub("^usdShade", "usdModel", shade_subset)

        asset_doc = io.find_one({"name": asset, "type": "asset"})
        if not asset_doc:
            raise RuntimeError("Asset does not exist: %s" % asset)

        subset_doc = io.find_one(
            {
                "name": model_subset,
                "type": "subset",
                "parent": asset_doc["_id"],
            }
        )
        if not subset_doc:
            raise RuntimeError(
                "USD Model subset not found: "
                "%s (%s)" % (model_subset, asset)
            )
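Note: the shade-to-model subset mapping is a simple prefix rewrite. A pure-Python sketch with a made-up subset name:

    import re

    subset = "usdShadeMain.red"                     # hypothetical subset name
    shade_subset = subset.split(".", 1)[0]          # "usdShadeMain"
    model_subset = re.sub("^usdShade", "usdModel", shade_subset)
    print(model_subset)                             # "usdModelMain"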
@@ -0,0 +1,63 @@
import pyblish.api
import openpype.api

import hou


class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
    """Validate USD Shading Workspace is correct version.

    There have been some issues with outdated/erroneous Shading Workspaces
    so this is to confirm everything is set as it should.

    """

    order = openpype.api.ValidateContentsOrder
    hosts = ["houdini"]
    families = ["usdShade"]
    label = "USD Shade Workspace"

    def process(self, instance):

        rop = instance[0]
        workspace = rop.parent()

        definition = workspace.type().definition()
        name = definition.nodeType().name()
        library = definition.libraryFilePath()

        all_definitions = hou.hda.definitionsInFile(library)
        node_type, version = name.rsplit(":", 1)
        version = float(version)

        highest = version
        for other_definition in all_definitions:
            other_name = other_definition.nodeType().name()
            other_node_type, other_version = other_name.rsplit(":", 1)
            other_version = float(other_version)

            if node_type != other_node_type:
                continue

            # Get highest version
            highest = max(highest, other_version)

        if version != highest:
            raise RuntimeError(
                "Shading Workspace is not the latest version."
                " Found %s. Latest is %s." % (version, highest)
            )

        # There were some issues with the editable node not having the right
        # configured path. So for now let's assure that is correct too.
        value = (
            'avalon://`chs("../asset_name")`/'
            'usdShade`chs("../model_variantname1")`.usd'
        )
        rop_value = rop.parm("lopoutput").rawValue()
        if rop_value != value:
            raise RuntimeError(
                "Shading Workspace has invalid 'lopoutput'"
                " parameter value. The Shading Workspace"
                " needs to be reset to its default values."
            )
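Note: the version comparison above is plain string parsing over versioned HDA type names. A standalone sketch with hypothetical type names:

    definitions = ["studio::usd_shade_workspace:1.0",
                   "studio::usd_shade_workspace:1.2"]  # hypothetical names

    name = definitions[0]
    node_type, version = name.rsplit(":", 1)
    highest = max(
        float(other.rsplit(":", 1)[1])
        for other in definitions
        if other.rsplit(":", 1)[0] == node_type
    )
    if float(version) != highest:
        print("Outdated: %s < %s" % (version, highest))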
@@ -3,7 +3,7 @@ import openpype.api


class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output node is of type VDB
    """Validate that the node connected to the output node is of type VDB.

    Regardless of the amount of VDBs create the output will need to have an
    equal amount of VDBs, points, primitives and vertices
@@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Node connected to the output node is not"
                               "of type VDB!")
            raise RuntimeError(
                "Node connected to the output node is not" "of type VDB!"
            )

    @classmethod
    def get_invalid(cls, instance):
@@ -0,0 +1,73 @@
import pyblish.api
import openpype.api
import hou


class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output node is of type VDB.

    Regardless of the amount of VDBs create the output will need to have an
    equal amount of VDBs, points, primitives and vertices

    A VDB is an inherited type of Prim, holds the following data:
        - Primitives: 1
        - Points: 1
        - Vertices: 1
        - VDBs: 1

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    families = ["vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node (VDB)"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Node connected to the output node is not" " of type VDB!"
            )

    @classmethod
    def get_invalid(cls, instance):

        node = instance.data["output_node"]
        if node is None:
            cls.log.error(
                "SOP path is not correctly set on "
                "ROP node '%s'." % instance[0].path()
            )
            return [instance]

        frame = instance.data.get("frameStart", 0)
        geometry = node.geometryAtFrame(frame)
        if geometry is None:
            # No geometry data on this node, maybe the node hasn't cooked?
            cls.log.error(
                "SOP node has no geometry data. "
                "Is it cooked? %s" % node.path()
            )
            return [node]

        prims = geometry.prims()
        nr_of_prims = len(prims)

        # All primitives must be hou.VDB
        invalid_prim = False
        for prim in prims:
            if not isinstance(prim, hou.VDB):
                cls.log.error("Found non-VDB primitive: %s" % prim)
                invalid_prim = True
        if invalid_prim:
            return [instance]

        nr_of_points = len(geometry.points())
        if nr_of_points != nr_of_prims:
            cls.log.error("The number of primitives and points do not match")
            return [instance]

        for prim in prims:
            if prim.numVertices() != 1:
                cls.log.error("Found primitive with more than 1 vertex!")
                return [instance]
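Note: the per-primitive VDB checks can be tried interactively on any SOP's geometry. A hedged sketch; the node path and frame are hypothetical:

    import hou

    geo = hou.node("/obj/pyro/OUT_VDB").geometryAtFrame(1001)  # hypothetical
    prims = geo.prims()
    all_vdb = all(isinstance(prim, hou.VDB) for prim in prims)
    counts_match = len(geo.points()) == len(prims)
    one_vertex_each = all(prim.numVertices() == 1 for prim in prims)
    print(all_vdb, counts_match, one_vertex_each)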
@@ -1,5 +1,4 @@
from avalon import api, houdini
import hou


def main():
Some files were not shown because too many files have changed in this diff.