Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'pypeclub:develop' into bugfix/rig-deformer-ids

Commit 5e7b09e863: 1300 changed files with 86420 additions and 400144 deletions
7  .github/workflows/documentation.yml  (vendored)

@@ -20,7 +20,8 @@ jobs:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: '12.x'
node-version: 14.x
cache: yarn
- name: Test Build
run: |
cd website
@@ -41,8 +42,8 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: '12.x'

node-version: 14.x
cache: yarn
- name: 🔨 Build
run: |
cd website
2  .github/workflows/prerelease.yml  (vendored)

@@ -43,7 +43,7 @@ jobs:
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}}'
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"
4  .github/workflows/release.yml  (vendored)

@@ -39,7 +39,7 @@ jobs:
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]},"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]}}'
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"

@@ -81,7 +81,7 @@ jobs:
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}}'
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: ${{ steps.version.outputs.last_release }}
11  .gitmodules  (vendored)

@@ -1,12 +1,3 @@
[submodule "repos/avalon-core"]
path = repos/avalon-core
url = https://github.com/pypeclub/avalon-core.git
[submodule "repos/avalon-unreal-integration"]
path = repos/avalon-unreal-integration
url = https://github.com/pypeclub/avalon-unreal-integration.git
[submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"]
path = openpype/modules/default_modules/ftrack/python2_vendor/arrow
url = https://github.com/arrow-py/arrow.git
[submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"]
path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api
url = https://bitbucket.org/ftrack/ftrack-python-api.git
url = https://github.com/pypeclub/avalon-core.git
161  CHANGELOG.md

@@ -1,96 +1,97 @@
# Changelog
|
||||
|
||||
## [3.8.0-nightly.6](https://github.com/pypeclub/OpenPype/tree/HEAD)
|
||||
## [3.9.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...HEAD)
|
||||
|
||||
### 📖 Documentation
|
||||
|
||||
- Renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546)
|
||||
- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498)
|
||||
|
||||
**🆕 New features**
|
||||
|
||||
- Flame: collecting publishable instances [\#2519](https://github.com/pypeclub/OpenPype/pull/2519)
|
||||
- Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495)
|
||||
|
||||
**🚀 Enhancements**
|
||||
|
||||
- Webpublisher: Moved error at the beginning of the log [\#2559](https://github.com/pypeclub/OpenPype/pull/2559)
|
||||
- Ftrack: Use ApplicationManager to get DJV path [\#2558](https://github.com/pypeclub/OpenPype/pull/2558)
|
||||
- Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555)
|
||||
- Settings: PathInput strip passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550)
|
||||
- Global: Exctract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548)
|
||||
- Flame: extracting segments with trans-coding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547)
|
||||
- Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542)
|
||||
- General: Validate if current process OpenPype version is requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529)
|
||||
- General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525)
|
||||
- Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521)
|
||||
- Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510)
|
||||
- TimersManager: Move module one hierarchy higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501)
|
||||
- Slack: notifications are sent with Openpype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499)
|
||||
- Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482)
|
||||
- Maya: Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455)
|
||||
- Houdini: Remove broken unique name counter [\#2450](https://github.com/pypeclub/OpenPype/pull/2450)
|
||||
- Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447)
|
||||
- General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425)
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD)
|
||||
|
||||
**🐛 Bug fixes**
|
||||
|
||||
- AfterEffects: Fix - removed obsolete import [\#2577](https://github.com/pypeclub/OpenPype/pull/2577)
|
||||
- Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563)
|
||||
- Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560)
|
||||
- General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557)
|
||||
- Webpublisher: Fixed progress reporting [\#2553](https://github.com/pypeclub/OpenPype/pull/2553)
|
||||
- Fix Maya AssProxyLoader version switch [\#2551](https://github.com/pypeclub/OpenPype/pull/2551)
|
||||
- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549)
|
||||
- Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535)
|
||||
- Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533)
|
||||
- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522)
|
||||
- Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518)
|
||||
- Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513)
|
||||
- Maya: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506)
|
||||
- Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505)
|
||||
- Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456)
|
||||
- Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891)
|
||||
- General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885)
|
||||
- General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884)
|
||||
- Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874)
|
||||
|
||||
**Merged pull requests:**
|
||||
**🔀 Refactored code**
|
||||
|
||||
- AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543)
|
||||
- Maya: Remove Maya Look Assigner check on startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540)
|
||||
- build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538)
|
||||
- build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534)
|
||||
- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532)
|
||||
- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531)
|
||||
- Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514)
|
||||
- General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889)
|
||||
|
||||
## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.0-nightly.9...3.9.0)
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845)
|
||||
|
||||
**🚀 Enhancements**
|
||||
|
||||
- General: Subset name filtering in ExtractReview outpus [\#2872](https://github.com/pypeclub/OpenPype/pull/2872)
|
||||
- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867)
|
||||
- NewPublisher: Changing task on publishing instance [\#2863](https://github.com/pypeclub/OpenPype/pull/2863)
|
||||
- TrayPublisher: Choose project widget is more clear [\#2859](https://github.com/pypeclub/OpenPype/pull/2859)
|
||||
- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841)
|
||||
- Maya: add loaded containers to published instance [\#2837](https://github.com/pypeclub/OpenPype/pull/2837)
|
||||
- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836)
|
||||
- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822)
|
||||
- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817)
|
||||
- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812)
|
||||
- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811)
|
||||
- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805)
|
||||
|
||||
**🐛 Bug fixes**
|
||||
|
||||
- General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877)
|
||||
- Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868)
|
||||
- Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866)
|
||||
- General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864)
|
||||
- General: ffmpeg was crashing on slate merge [\#2860](https://github.com/pypeclub/OpenPype/pull/2860)
|
||||
- WebPublisher: Video file was published with one too many frame [\#2858](https://github.com/pypeclub/OpenPype/pull/2858)
|
||||
- New Publisher: Error dialog got right styles [\#2857](https://github.com/pypeclub/OpenPype/pull/2857)
|
||||
- General: Fix getattr clalback on dynamic modules [\#2855](https://github.com/pypeclub/OpenPype/pull/2855)
|
||||
- Nuke: slate resolution to input video resolution [\#2853](https://github.com/pypeclub/OpenPype/pull/2853)
|
||||
- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852)
|
||||
- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851)
|
||||
- Nuke: Fix family test in validate\_write\_legacy to work with stillImage [\#2847](https://github.com/pypeclub/OpenPype/pull/2847)
|
||||
- Nuke: fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842)
|
||||
- Blender: Fixed parameters for FBX export of the camera [\#2840](https://github.com/pypeclub/OpenPype/pull/2840)
|
||||
- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832)
|
||||
- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828)
|
||||
- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827)
|
||||
- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825)
|
||||
- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824)
|
||||
- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821)
|
||||
- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820)
|
||||
- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819)
|
||||
- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818)
|
||||
- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816)
|
||||
|
||||
**🔀 Refactored code**
|
||||
|
||||
- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876)
|
||||
- General: Move create logic from avalon to OpenPype [\#2854](https://github.com/pypeclub/OpenPype/pull/2854)
|
||||
- General: Add vendors from avalon [\#2848](https://github.com/pypeclub/OpenPype/pull/2848)
|
||||
- General: Basic event system [\#2846](https://github.com/pypeclub/OpenPype/pull/2846)
|
||||
- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839)
|
||||
- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829)
|
||||
- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823)
|
||||
- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766)
|
||||
|
||||
## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.2-nightly.3...3.8.2)
|
||||
|
||||
## [3.8.1](https://github.com/pypeclub/OpenPype/tree/3.8.1) (2022-02-01)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.1-nightly.3...3.8.1)
|
||||
|
||||
## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.0-nightly.7...3.8.0)
|
||||
|
||||
## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.7.0-nightly.14...3.7.0)
|
||||
|
||||
**🚀 Enhancements**
|
||||
|
||||
- General: Workdir extra folders [\#2462](https://github.com/pypeclub/OpenPype/pull/2462)
|
||||
- Photoshop: New style validations for New publisher [\#2429](https://github.com/pypeclub/OpenPype/pull/2429)
|
||||
- General: Environment variables groups [\#2424](https://github.com/pypeclub/OpenPype/pull/2424)
|
||||
- Unreal: Dynamic menu created in Python [\#2422](https://github.com/pypeclub/OpenPype/pull/2422)
|
||||
- Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420)
|
||||
- Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419)
|
||||
- TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418)
|
||||
- General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408)
|
||||
|
||||
**🐛 Bug fixes**
|
||||
|
||||
- TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471)
|
||||
- Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428)
|
||||
- PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417)
|
||||
- Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416)
|
||||
- AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412)
|
||||
- Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
- Forced cx\_freeze to include sqlite3 into build [\#2432](https://github.com/pypeclub/OpenPype/pull/2432)
|
||||
|
||||
## [3.6.4](https://github.com/pypeclub/OpenPype/tree/3.6.4) (2021-11-23)
|
||||
|
||||
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.7.0-nightly.1...3.6.4)
|
||||
|
|
|
|||
35  Dockerfile

@@ -1,13 +1,18 @@
# Build Pype docker image
FROM debian:bookworm-slim AS builder
FROM ubuntu:focal AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.12
ARG BUILD_DATE
ARG VERSION

LABEL maintainer="info@openpype.io"
LABEL description="Docker Image to build and run OpenPype"
LABEL description="Docker Image to build and run OpenPype under Ubuntu 20.04"
LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/OpenPype"
LABEL org.opencontainers.image.documentation="https://openpype.io/docs/system_introduction"
LABEL org.opencontainers.image.created=$BUILD_DATE
LABEL org.opencontainers.image.version=$VERSION

USER root
@@ -42,14 +47,19 @@ RUN apt-get update \

SHELL ["/bin/bash", "-c"]

RUN mkdir /opt/openpype

# download and install pyenv
RUN curl https://pyenv.run | bash \
&& echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \
&& echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \
&& echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \
&& echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc \
&& source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION}
&& echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv init -)"' >> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv init --path)"' >> $HOME/init_pyenv.sh

# install python with pyenv
RUN source $HOME/init_pyenv.sh \
&& pyenv install ${OPENPYPE_PYTHON_VERSION}

COPY . /opt/openpype/
@ -57,13 +67,16 @@ RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/b
|
|||
|
||||
WORKDIR /opt/openpype
|
||||
|
||||
# set local python version
|
||||
RUN cd /opt/openpype \
|
||||
&& source $HOME/.bashrc \
|
||||
&& source $HOME/init_pyenv.sh \
|
||||
&& pyenv local ${OPENPYPE_PYTHON_VERSION}
|
||||
|
||||
RUN source $HOME/.bashrc \
|
||||
# fetch third party tools/libraries
|
||||
RUN source $HOME/init_pyenv.sh \
|
||||
&& ./tools/create_env.sh \
|
||||
&& ./tools/fetch_thirdparty_libs.sh
|
||||
|
||||
RUN source $HOME/.bashrc \
|
||||
# build openpype
|
||||
RUN source $HOME/init_pyenv.sh \
|
||||
&& bash ./tools/build.sh
|
||||
|
|
|
|||
@@ -1,11 +1,15 @@
# Build Pype docker image
FROM centos:7 AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.10
ARG OPENPYPE_PYTHON_VERSION=3.7.12

LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype"
LABEL org.opencontainers.image.documentation="https://openpype.io/docs/system_introduction"
LABEL org.opencontainers.image.created=$BUILD_DATE
LABEL org.opencontainers.image.version=$VERSION

USER root

@@ -38,7 +42,8 @@ RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.n
patchelf \
automake \
autoconf \
ncurses \
patch \
ncurses \
ncurses-devel \
qt5-qtbase-devel \
xcb-util-wm \
|
|
@ -23,7 +23,8 @@ from .user_settings import (
|
|||
OpenPypeSettingsRegistry
|
||||
)
|
||||
from .tools import (
|
||||
get_openpype_path_from_db,
|
||||
get_openpype_global_settings,
|
||||
get_openpype_path_from_settings,
|
||||
get_expected_studio_version_str
|
||||
)
|
||||
|
||||
|
|
@ -973,9 +974,19 @@ class BootstrapRepos:
|
|||
for line in checksums_data.split("\n") if line
|
||||
]
|
||||
|
||||
# get list of files in zip minus `checksums` file itself
|
||||
# and turn in to set to compare against list of files
|
||||
# from checksum file. If difference exists, something is
|
||||
# wrong
|
||||
files_in_zip = set(zip_file.namelist())
|
||||
files_in_zip.remove("checksums")
|
||||
files_in_checksum = {file[1] for file in checksums}
|
||||
diff = files_in_zip.difference(files_in_checksum)
|
||||
if diff:
|
||||
return False, f"Missing files {diff}"
|
||||
|
||||
# calculate and compare checksums in the zip file
|
||||
for file in checksums:
|
||||
file_name = file[1]
|
||||
for file_checksum, file_name in checksums:
|
||||
if platform.system().lower() == "windows":
|
||||
file_name = file_name.replace("/", "\\")
|
||||
h = hashlib.sha256()
|
||||
|
|
@ -983,21 +994,9 @@ class BootstrapRepos:
|
|||
h.update(zip_file.read(file_name))
|
||||
except FileNotFoundError:
|
||||
return False, f"Missing file [ {file_name} ]"
|
||||
if h.hexdigest() != file[0]:
|
||||
if h.hexdigest() != file_checksum:
|
||||
return False, f"Invalid checksum on {file_name}"
|
||||
|
||||
# get list of files in zip minus `checksums` file itself
|
||||
# and turn in to set to compare against list of files
|
||||
# from checksum file. If difference exists, something is
|
||||
# wrong
|
||||
files_in_zip = zip_file.namelist()
|
||||
files_in_zip.remove("checksums")
|
||||
files_in_zip = set(files_in_zip)
|
||||
files_in_checksum = {file[1] for file in checksums}
|
||||
diff = files_in_zip.difference(files_in_checksum)
|
||||
if diff:
|
||||
return False, f"Missing files {diff}"
|
||||
|
||||
return True, "All ok"
|
||||
|
||||
@staticmethod
|
||||
|
|
@ -1011,16 +1010,22 @@ class BootstrapRepos:
|
|||
tuple(line.split(":"))
|
||||
for line in checksums_data.split("\n") if line
|
||||
]
|
||||
files_in_dir = [
|
||||
|
||||
# compare file list against list of files from checksum file.
|
||||
# If difference exists, something is wrong and we invalidate directly
|
||||
files_in_dir = set(
|
||||
file.relative_to(path).as_posix()
|
||||
for file in path.iterdir() if file.is_file()
|
||||
]
|
||||
)
|
||||
files_in_dir.remove("checksums")
|
||||
files_in_dir = set(files_in_dir)
|
||||
files_in_checksum = {file[1] for file in checksums}
|
||||
|
||||
for file in checksums:
|
||||
file_name = file[1]
|
||||
diff = files_in_dir.difference(files_in_checksum)
|
||||
if diff:
|
||||
return False, f"Missing files {diff}"
|
||||
|
||||
# calculate and compare checksums
|
||||
for file_checksum, file_name in checksums:
|
||||
if platform.system().lower() == "windows":
|
||||
file_name = file_name.replace("/", "\\")
|
||||
try:
|
||||
|
|
@ -1028,11 +1033,8 @@ class BootstrapRepos:
|
|||
except FileNotFoundError:
|
||||
return False, f"Missing file [ {file_name} ]"
|
||||
|
||||
if file[0] != current:
|
||||
if file_checksum != current:
|
||||
return False, f"Invalid checksum on {file_name}"
|
||||
diff = files_in_dir.difference(files_in_checksum)
|
||||
if diff:
|
||||
return False, f"Missing files {diff}"
|
||||
|
||||
return True, "All ok"
|
||||
|
||||
|
|
@ -1262,7 +1264,8 @@ class BootstrapRepos:
|
|||
openpype_path = None
|
||||
# try to get OpenPype path from mongo.
|
||||
if location.startswith("mongodb"):
|
||||
openpype_path = get_openpype_path_from_db(location)
|
||||
global_settings = get_openpype_global_settings(location)
|
||||
openpype_path = get_openpype_path_from_settings(global_settings)
|
||||
if not openpype_path:
|
||||
self._print("cannot find OPENPYPE_PATH in settings.")
|
||||
return None
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ from Qt.QtCore import QTimer # noqa
|
|||
from .install_thread import InstallThread
|
||||
from .tools import (
|
||||
validate_mongo_connection,
|
||||
get_openpype_path_from_db,
|
||||
get_openpype_icon_path
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -161,18 +161,17 @@ def get_openpype_global_settings(url: str) -> dict:
|
|||
return global_settings.get("data") or {}
|
||||
|
||||
|
||||
def get_openpype_path_from_db(url: str) -> Union[str, None]:
|
||||
def get_openpype_path_from_settings(settings: dict) -> Union[str, None]:
|
||||
"""Get OpenPype path from global settings.
|
||||
|
||||
Args:
|
||||
url (str): mongodb url.
|
||||
settings (dict): mongodb url.
|
||||
|
||||
Returns:
|
||||
path to OpenPype or None if not found
|
||||
"""
|
||||
global_settings = get_openpype_global_settings(url)
|
||||
paths = (
|
||||
global_settings
|
||||
settings
|
||||
.get("openpype_path", {})
|
||||
.get(platform.system().lower())
|
||||
) or []
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@ from .lib import (
|
|||
Anatomy,
|
||||
filter_pyblish_plugins,
|
||||
set_plugin_attributes_from_settings,
|
||||
change_timer_to_current_context
|
||||
change_timer_to_current_context,
|
||||
register_event_callback,
|
||||
)
|
||||
|
||||
pyblish = avalon = _original_discover = None
|
||||
|
|
@ -58,10 +59,15 @@ def patched_discover(superclass):
|
|||
"""
|
||||
# run original discover and get plugins
|
||||
plugins = _original_discover(superclass)
|
||||
filtered_plugins = [
|
||||
plugin
|
||||
for plugin in plugins
|
||||
if issubclass(plugin, superclass)
|
||||
]
|
||||
|
||||
set_plugin_attributes_from_settings(plugins, superclass)
|
||||
set_plugin_attributes_from_settings(filtered_plugins, superclass)
|
||||
|
||||
return plugins
|
||||
return filtered_plugins
|
||||
|
||||
|
||||
@import_wrapper
|
||||
|
|
@ -69,6 +75,7 @@ def install():
|
|||
"""Install Pype to Avalon."""
|
||||
from pyblish.lib import MessageHandler
|
||||
from openpype.modules import load_modules
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from avalon import pipeline
|
||||
|
||||
# Make sure modules are loaded
|
||||
|
|
@ -113,7 +120,7 @@ def install():
|
|||
|
||||
pyblish.register_plugin_path(path)
|
||||
avalon.register_plugin_path(avalon.Loader, path)
|
||||
avalon.register_plugin_path(avalon.Creator, path)
|
||||
avalon.register_plugin_path(LegacyCreator, path)
|
||||
avalon.register_plugin_path(avalon.InventoryAction, path)
|
||||
|
||||
# apply monkey patched discover to original one
|
||||
|
|
@ -122,10 +129,10 @@ def install():
|
|||
avalon.discover = patched_discover
|
||||
pipeline.discover = patched_discover
|
||||
|
||||
avalon.on("taskChanged", _on_task_change)
|
||||
register_event_callback("taskChanged", _on_task_change)
|
||||
|
||||
|
||||
def _on_task_change(*args):
|
||||
def _on_task_change():
|
||||
change_timer_to_current_context()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -45,9 +45,6 @@ from .lib.avalon_context import (
|
|||
from . import resources
|
||||
|
||||
from .plugin import (
|
||||
PypeCreatorMixin,
|
||||
Creator,
|
||||
|
||||
Extractor,
|
||||
|
||||
ValidatePipelineOrder,
|
||||
|
|
@ -89,9 +86,6 @@ __all__ = [
|
|||
# Resources
|
||||
"resources",
|
||||
|
||||
# Pype creator mixin
|
||||
"PypeCreatorMixin",
|
||||
"Creator",
|
||||
# plugin classes
|
||||
"Extractor",
|
||||
# ordering
|
||||
|
|
|
|||
|
|
@ -42,6 +42,12 @@ def standalonepublisher():
|
|||
PypeCommands().launch_standalone_publisher()
|
||||
|
||||
|
||||
@main.command()
|
||||
def traypublisher():
|
||||
"""Show new OpenPype Standalone publisher UI."""
|
||||
PypeCommands().launch_traypublisher()
|
||||
|
||||
|
||||
@main.command()
|
||||
@click.option("-d", "--debug",
|
||||
is_flag=True, help=("Run pype tray in debug mode"))
|
||||
|
|
@ -371,10 +377,15 @@ def run(script):
|
|||
"--app_variant",
|
||||
help="Provide specific app variant for test, empty for latest",
|
||||
default=None)
|
||||
def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant):
|
||||
@click.option("-t",
|
||||
"--timeout",
|
||||
help="Provide specific timeout value for test case",
|
||||
default=None)
|
||||
def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
|
||||
timeout):
|
||||
"""Run all automatic tests after proper initialization via start.py"""
|
||||
PypeCommands().run_tests(folder, mark, pyargs, test_data_folder,
|
||||
persist, app_variant)
|
||||
persist, app_variant, timeout)
|
||||
|
||||
|
||||
@main.command()
|
||||
|
|
@ -412,3 +423,23 @@ def repack_version(directory):
|
|||
directory name.
|
||||
"""
|
||||
PypeCommands().repack_version(directory)
|
||||
|
||||
|
||||
@main.command()
|
||||
@click.option("--project", help="Project name")
|
||||
@click.option(
|
||||
"--dirpath", help="Directory where package is stored", default=None
|
||||
)
|
||||
def pack_project(project, dirpath):
|
||||
"""Create a package of project with all files and database dump."""
|
||||
PypeCommands().pack_project(project, dirpath)
|
||||
|
||||
|
||||
@main.command()
|
||||
@click.option("--zipfile", help="Path to zip file")
|
||||
@click.option(
|
||||
"--root", help="Replace root which was stored in project", default=None
|
||||
)
|
||||
def unpack_project(zipfile, root):
|
||||
"""Create a package of project with all files and database dump."""
|
||||
PypeCommands().unpack_project(zipfile, root)
|
||||
|
|
|
|||
|
|
@ -6,6 +6,8 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
|||
"""Add last workfile path to launch arguments.
|
||||
|
||||
This is not possible to do for all applications the same way.
|
||||
Checks 'start_last_workfile', if set to False, it will not open last
|
||||
workfile. This property is set explicitly in Launcher.
|
||||
"""
|
||||
|
||||
# Execute after workfile template copy
|
||||
|
|
@ -15,11 +17,12 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
|||
"nuke",
|
||||
"nukex",
|
||||
"hiero",
|
||||
"houdini",
|
||||
"nukestudio",
|
||||
"blender",
|
||||
"photoshop",
|
||||
"tvpaint",
|
||||
"afftereffects"
|
||||
"aftereffects"
|
||||
]
|
||||
|
||||
def execute(self):
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ class CopyTemplateWorkfile(PreLaunchHook):
|
|||
|
||||
# Before `AddLastWorkfileToLaunchArgs`
|
||||
order = 0
|
||||
app_groups = ["blender", "photoshop", "tvpaint", "afftereffects"]
|
||||
app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]
|
||||
|
||||
def execute(self):
|
||||
"""Check if can copy template for context and do it if possible.
|
||||
|
|
@ -44,7 +44,7 @@ class CopyTemplateWorkfile(PreLaunchHook):
|
|||
return
|
||||
|
||||
if os.path.exists(last_workfile):
|
||||
self.log.debug("Last workfile exits. Skipping {} process.".format(
|
||||
self.log.debug("Last workfile exists. Skipping {} process.".format(
|
||||
self.__class__.__name__
|
||||
))
|
||||
return
|
||||
|
|
@ -120,7 +120,7 @@ class CopyTemplateWorkfile(PreLaunchHook):
|
|||
f"Creating workfile from template: \"{template_path}\""
|
||||
)
|
||||
|
||||
# Copy template workfile to new destinantion
|
||||
# Copy template workfile to new destination
|
||||
shutil.copy2(
|
||||
os.path.normpath(template_path),
|
||||
os.path.normpath(last_workfile)
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from openpype.api import Anatomy
|
|||
from openpype.lib import (
|
||||
PreLaunchHook,
|
||||
EnvironmentPrepData,
|
||||
prepare_host_environments,
|
||||
prepare_app_environments,
|
||||
prepare_context_environments
|
||||
)
|
||||
|
||||
|
|
@ -14,14 +14,6 @@ class GlobalHostDataHook(PreLaunchHook):
|
|||
|
||||
def execute(self):
|
||||
"""Prepare global objects to `data` that will be used for sure."""
|
||||
if not self.application.is_host:
|
||||
self.log.info(
|
||||
"Skipped hook {}. Application is not marked as host.".format(
|
||||
self.__class__.__name__
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
self.prepare_global_data()
|
||||
|
||||
if not self.data.get("asset_doc"):
|
||||
|
|
@ -43,12 +35,13 @@ class GlobalHostDataHook(PreLaunchHook):
|
|||
|
||||
"env": self.launch_context.env,
|
||||
|
||||
"start_last_workfile": self.data.get("start_last_workfile"),
|
||||
"last_workfile_path": self.data.get("last_workfile_path"),
|
||||
|
||||
"log": self.log
|
||||
})
|
||||
|
||||
prepare_host_environments(temp_data, self.launch_context.env_group)
|
||||
prepare_app_environments(temp_data, self.launch_context.env_group)
|
||||
prepare_context_environments(temp_data)
|
||||
|
||||
temp_data.pop("log")
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import os
|
||||
import subprocess
|
||||
|
||||
from openpype.lib import (
|
||||
PreLaunchHook,
|
||||
get_openpype_execute_args
|
||||
)
|
||||
from openpype.lib.applications import get_non_python_host_kwargs
|
||||
|
||||
from openpype import PACKAGE_DIR as OPENPYPE_DIR
|
||||
|
||||
|
|
@ -40,7 +40,10 @@ class NonPythonHostHook(PreLaunchHook):
|
|||
)
|
||||
# Add workfile path if exists
|
||||
workfile_path = self.data["last_workfile_path"]
|
||||
if os.path.exists(workfile_path):
|
||||
if (
|
||||
self.data.get("start_last_workfile")
|
||||
and workfile_path
|
||||
and os.path.exists(workfile_path)):
|
||||
new_launch_args.append(workfile_path)
|
||||
|
||||
# Append as whole list as these areguments should not be separated
|
||||
|
|
@ -48,3 +51,7 @@ class NonPythonHostHook(PreLaunchHook):
|
|||
|
||||
if remainders:
|
||||
self.launch_context.launch_args.extend(remainders)
|
||||
|
||||
self.launch_context.kwargs = \
|
||||
get_non_python_host_kwargs(self.launch_context.kwargs)
|
||||
|
||||
|
|
|
|||
Binary file not shown.
|
|
@ -1,5 +1,5 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ExtensionManifest Version="8.0" ExtensionBundleId="com.openpype.AE.panel" ExtensionBundleVersion="1.0.21"
|
||||
<ExtensionManifest Version="8.0" ExtensionBundleId="com.openpype.AE.panel" ExtensionBundleVersion="1.0.22"
|
||||
ExtensionBundleName="openpype" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<ExtensionList>
|
||||
<Extension Id="com.openpype.AE.panel" Version="1.0" />
|
||||
|
|
|
|||
|
|
@ -301,6 +301,15 @@ function main(websocket_url){
|
|||
return get_extension_version();
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.get_app_version', function (data) {
|
||||
log.warn('Server called client route "get_app_version":', data);
|
||||
return runEvalScript("getAppVersion()")
|
||||
.then(function(result){
|
||||
log.warn("get_app_version: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('AfterEffects.close', function (data) {
|
||||
log.warn('Server called client route "close":', data);
|
||||
return runEvalScript("close()");
|
||||
|
|
|
|||
|
|
@ -438,7 +438,10 @@ function getAudioUrlForComp(comp_id){
|
|||
for (i = 1; i <= item.numLayers; ++i){
|
||||
var layer = item.layers[i];
|
||||
if (layer instanceof AVLayer){
|
||||
return layer.source.file.fsName.toString();
|
||||
if (layer.hasAudio){
|
||||
source_url = layer.source.file.fsName.toString()
|
||||
return _prepareSingleValue(source_url);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -715,6 +718,10 @@ function close(){
|
|||
app.quit();
|
||||
}
|
||||
|
||||
function getAppVersion(){
|
||||
return _prepareSingleValue(app.version);
|
||||
}
|
||||
|
||||
function _prepareSingleValue(value){
|
||||
return JSON.stringify({"result": value})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ from Qt import QtCore
|
|||
from openpype.tools.utils import host_tools
|
||||
|
||||
from avalon import api
|
||||
from avalon.tools.webserver.app import WebServerTool
|
||||
from openpype.tools.adobe_webserver.app import WebServerTool
|
||||
|
||||
from .ws_stub import AfterEffectsServerStub
|
||||
|
||||
|
|
|
|||
|
|
@ -35,7 +35,6 @@ def main(*subprocess_args):
|
|||
launcher.start()
|
||||
|
||||
if os.environ.get("HEADLESS_PUBLISH"):
|
||||
# reusing ConsoleTrayApp approach as it was already implemented
|
||||
launcher.execute_in_main_thread(lambda: headless_publish(
|
||||
log,
|
||||
"CloseAE",
|
||||
|
|
|
|||
|
|
@ -9,7 +9,9 @@ from avalon import io, pipeline
|
|||
|
||||
from openpype import lib
|
||||
from openpype.api import Logger
|
||||
from openpype.pipeline import LegacyCreator
|
||||
import openpype.hosts.aftereffects
|
||||
from openpype.lib import register_event_callback
|
||||
|
||||
from .launch_logic import get_stub
|
||||
|
||||
|
|
@ -66,20 +68,20 @@ def install():
|
|||
pyblish.api.register_plugin_path(PUBLISH_PATH)
|
||||
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
log.info(PUBLISH_PATH)
|
||||
|
||||
pyblish.api.register_callback(
|
||||
"instanceToggled", on_pyblish_instance_toggled
|
||||
)
|
||||
|
||||
avalon.api.on("application.launched", application_launch)
|
||||
register_event_callback("application.launched", application_launch)
|
||||
|
||||
|
||||
def uninstall():
|
||||
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
|
||||
avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
|
||||
avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
|
||||
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
|
||||
|
||||
def on_pyblish_instance_toggled(instance, old_value, new_value):
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import logging
|
|||
import attr
|
||||
|
||||
from wsrpc_aiohttp import WebSocketAsync
|
||||
from avalon.tools.webserver.app import WebServerTool
|
||||
from openpype.tools.adobe_webserver.app import WebServerTool
|
||||
|
||||
|
||||
@attr.s
|
||||
|
|
@ -537,6 +537,13 @@ class AfterEffectsServerStub():
|
|||
|
||||
return self._handle_return(res)
|
||||
|
||||
def get_app_version(self):
|
||||
"""Returns version number of installed application (17.5...)."""
|
||||
res = self.websocketserver.call(self.client.call(
|
||||
'AfterEffects.get_app_version'))
|
||||
|
||||
return self._handle_return(res)
|
||||
|
||||
def close(self):
|
||||
res = self.websocketserver.call(self.client.call('AfterEffects.close'))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,13 +1,12 @@
|
|||
from avalon.api import CreatorError
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import create
|
||||
from openpype.pipeline import CreatorError
|
||||
from openpype.hosts.aftereffects.api import (
|
||||
get_stub,
|
||||
list_instances
|
||||
)
|
||||
|
||||
|
||||
class CreateRender(openpype.api.Creator):
|
||||
class CreateRender(create.LegacyCreator):
|
||||
"""Render folder for publish.
|
||||
|
||||
Creates subsets in format 'familyTaskSubsetname',
|
||||
|
|
@ -19,6 +18,7 @@ class CreateRender(openpype.api.Creator):
|
|||
name = "renderDefault"
|
||||
label = "Render on Farm"
|
||||
family = "render"
|
||||
defaults = ["Main"]
|
||||
|
||||
def process(self):
|
||||
stub = get_stub() # only after After Effects is up
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ class AERenderInstance(RenderInstance):
|
|||
fps = attr.ib(default=None)
|
||||
projectEntity = attr.ib(default=None)
|
||||
stagingDir = attr.ib(default=None)
|
||||
app_version = attr.ib(default=None)
|
||||
|
||||
|
||||
class CollectAERender(abstract_collect_render.AbstractCollectRender):
|
||||
|
|
@ -41,6 +42,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
|
|||
def get_instances(self, context):
|
||||
instances = []
|
||||
|
||||
app_version = self.stub.get_app_version()
|
||||
app_version = app_version[0:4]
|
||||
|
||||
current_file = context.data["currentFile"]
|
||||
version = context.data["version"]
|
||||
asset_entity = context.data["assetEntity"]
|
||||
|
|
@ -105,7 +109,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
|
|||
frameEnd=frameEnd,
|
||||
frameStep=1,
|
||||
toBeRenderedOn='deadline',
|
||||
fps=fps
|
||||
fps=fps,
|
||||
app_version=app_version
|
||||
)
|
||||
|
||||
comp = compositions_by_id.get(int(item_id))
|
||||
|
|
@ -118,6 +123,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
|
|||
instance.anatomyData = context.data["anatomyData"]
|
||||
|
||||
instance.outputDir = self._get_output_dir(instance)
|
||||
instance.context = context
|
||||
|
||||
settings = get_project_settings(os.getenv("AVALON_PROJECT"))
|
||||
reviewable_subset_filter = \
|
||||
|
|
@ -142,7 +148,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
|
|||
break
|
||||
|
||||
self.log.info("New instance:: {}".format(instance))
|
||||
|
||||
instances.append(instance)
|
||||
|
||||
return instances
|
||||
|
|
|
|||
|
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Subset context</title>
|
||||
<description>
|
||||
## Invalid subset context
|
||||
|
||||
Context of the given subset doesn't match your current scene.
|
||||
|
||||
### How to repair?
|
||||
|
||||
You can fix this with "repair" button on the right.
|
||||
</description>
|
||||
<detail>
|
||||
### __Detailed Info__ (optional)
|
||||
|
||||
This might happen if you are reuse old workfile and open it in different context.
|
||||
(Eg. you created subset "renderCompositingDefault" from asset "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing subset for "Robot" asset stayed in the workfile.)
|
||||
</detail>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Scene setting</title>
|
||||
<description>
|
||||
## Invalid scene setting found
|
||||
|
||||
One of the settings in a scene doesn't match to asset settings in database.
|
||||
|
||||
{invalid_setting_str}
|
||||
|
||||
### How to repair?
|
||||
|
||||
Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
|
||||
</description>
|
||||
<detail>
|
||||
### __Detailed Info__ (optional)
|
||||
|
||||
This error is shown when for example resolution in the scene doesn't match to resolution set on the asset in the database.
|
||||
Either value in the database or in the scene is wrong.
|
||||
</detail>
|
||||
</error>
|
||||
<error id="file_not_found">
|
||||
<title>Scene file doesn't exist</title>
|
||||
<description>
|
||||
## Scene file doesn't exist
|
||||
|
||||
Collected scene {scene_url} doesn't exist.
|
||||
|
||||
### How to repair?
|
||||
|
||||
Re-save file, start publish from the beginning again.
|
||||
</description>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
from avalon import api
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
from openpype.pipeline import PublishXmlValidationError
|
||||
from openpype.hosts.aftereffects.api import get_stub
|
||||
|
||||
|
||||
|
|
@ -53,9 +54,8 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
|
|||
current_asset = api.Session["AVALON_ASSET"]
|
||||
msg = (
|
||||
f"Instance asset {instance_asset} is not the same "
|
||||
f"as current context {current_asset}. PLEASE DO:\n"
|
||||
f"Repair with 'A' action to use '{current_asset}'.\n"
|
||||
f"If that's not correct value, close workfile and "
|
||||
f"reopen via Workfiles!"
|
||||
f"as current context {current_asset}."
|
||||
)
|
||||
assert instance_asset == current_asset, msg
|
||||
|
||||
if instance_asset != current_asset:
|
||||
raise PublishXmlValidationError(self, msg)
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import re
|
|||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import PublishXmlValidationError
|
||||
from openpype.hosts.aftereffects.api import get_asset_settings
|
||||
|
||||
|
||||
|
|
@ -99,12 +100,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
self.log.info("current_settings:: {}".format(current_settings))
|
||||
|
||||
invalid_settings = []
|
||||
invalid_keys = set()
|
||||
for key, value in expected_settings.items():
|
||||
if value != current_settings[key]:
|
||||
invalid_settings.append(
|
||||
"{} expected: {} found: {}".format(key, value,
|
||||
current_settings[key])
|
||||
)
|
||||
invalid_keys.add(key)
|
||||
|
||||
if ((expected_settings.get("handleStart")
|
||||
or expected_settings.get("handleEnd"))
|
||||
|
|
@ -116,7 +119,27 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
msg = "Found invalid settings:\n{}".format(
|
||||
"\n".join(invalid_settings)
|
||||
)
|
||||
assert not invalid_settings, msg
|
||||
assert os.path.exists(instance.data.get("source")), (
|
||||
"Scene file not found (saved under wrong name)"
|
||||
)
|
||||
|
||||
if invalid_settings:
|
||||
invalid_keys_str = ",".join(invalid_keys)
|
||||
break_str = "<br/>"
|
||||
invalid_setting_str = "<b>Found invalid settings:</b><br/>{}".\
|
||||
format(break_str.join(invalid_settings))
|
||||
|
||||
formatting_data = {
|
||||
"invalid_setting_str": invalid_setting_str,
|
||||
"invalid_keys_str": invalid_keys_str
|
||||
}
|
||||
raise PublishXmlValidationError(self, msg,
|
||||
formatting_data=formatting_data)
|
||||
|
||||
if not os.path.exists(instance.data.get("source")):
|
||||
scene_url = instance.data.get("source")
|
||||
msg = "Scene file {} not found (saved under wrong name)".format(
|
||||
scene_url
|
||||
)
|
||||
formatting_data = {
|
||||
"scene_url": scene_url
|
||||
}
|
||||
raise PublishXmlValidationError(self, msg, key="file_not_found",
|
||||
formatting_data=formatting_data)
|
||||
|
|
|
|||
|
|
@ -5,11 +5,8 @@ def add_implementation_envs(env, _app):
|
|||
"""Modify environments to contain all required for implementation."""
|
||||
# Prepare path to implementation script
|
||||
implementation_user_script_path = os.path.join(
|
||||
os.environ["OPENPYPE_REPOS_ROOT"],
|
||||
"repos",
|
||||
"avalon-core",
|
||||
"setup",
|
||||
"blender"
|
||||
os.path.dirname(os.path.abspath(__file__)),
|
||||
"blender_addon"
|
||||
)
|
||||
|
||||
# Add blender implementation script path to PYTHONPATH
|
||||
|
|
|
|||
|
|
@ -1,94 +1,64 @@
|
|||
import os
|
||||
import sys
|
||||
import traceback
|
||||
"""Public API
|
||||
|
||||
import bpy
|
||||
Anything that isn't defined here is INTERNAL and unreliable for external use.
|
||||
|
||||
from .lib import append_user_scripts
|
||||
"""
|
||||
|
||||
from avalon import api as avalon
|
||||
from pyblish import api as pyblish
|
||||
from .pipeline import (
|
||||
install,
|
||||
uninstall,
|
||||
ls,
|
||||
publish,
|
||||
containerise,
|
||||
)
|
||||
|
||||
import openpype.hosts.blender
|
||||
from .plugin import (
|
||||
Creator,
|
||||
Loader,
|
||||
)
|
||||
|
||||
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__))
|
||||
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
|
||||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
||||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
from .workio import (
|
||||
open_file,
|
||||
save_file,
|
||||
current_file,
|
||||
has_unsaved_changes,
|
||||
file_extensions,
|
||||
work_root,
|
||||
)
|
||||
|
||||
ORIGINAL_EXCEPTHOOK = sys.excepthook
|
||||
from .lib import (
|
||||
lsattr,
|
||||
lsattrs,
|
||||
read,
|
||||
maintained_selection,
|
||||
get_selection,
|
||||
# unique_name,
|
||||
)
|
||||
|
||||
|
||||
def pype_excepthook_handler(*args):
|
||||
traceback.print_exception(*args)
|
||||
__all__ = [
|
||||
"install",
|
||||
"uninstall",
|
||||
"ls",
|
||||
"publish",
|
||||
"containerise",
|
||||
|
||||
"Creator",
|
||||
"Loader",
|
||||
|
||||
def install():
|
||||
"""Install Blender configuration for Avalon."""
|
||||
sys.excepthook = pype_excepthook_handler
|
||||
pyblish.register_plugin_path(str(PUBLISH_PATH))
|
||||
avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
|
||||
avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))
|
||||
append_user_scripts()
|
||||
avalon.on("new", on_new)
|
||||
avalon.on("open", on_open)
|
||||
# Workfiles API
|
||||
"open_file",
|
||||
"save_file",
|
||||
"current_file",
|
||||
"has_unsaved_changes",
|
||||
"file_extensions",
|
||||
"work_root",
|
||||
|
||||
|
||||
def uninstall():
|
||||
"""Uninstall Blender configuration for Avalon."""
|
||||
sys.excepthook = ORIGINAL_EXCEPTHOOK
|
||||
pyblish.deregister_plugin_path(str(PUBLISH_PATH))
|
||||
avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
|
||||
avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))
|
||||
|
||||
|
||||
def set_start_end_frames():
|
||||
from avalon import io
|
||||
|
||||
asset_name = io.Session["AVALON_ASSET"]
|
||||
asset_doc = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
scene = bpy.context.scene
|
||||
|
||||
# Default scene settings
|
||||
frameStart = scene.frame_start
|
||||
frameEnd = scene.frame_end
|
||||
fps = scene.render.fps
|
||||
resolution_x = scene.render.resolution_x
|
||||
resolution_y = scene.render.resolution_y
|
||||
|
||||
# Check if settings are set
|
||||
data = asset_doc.get("data")
|
||||
|
||||
if not data:
|
||||
return
|
||||
|
||||
if data.get("frameStart"):
|
||||
frameStart = data.get("frameStart")
|
||||
if data.get("frameEnd"):
|
||||
frameEnd = data.get("frameEnd")
|
||||
if data.get("fps"):
|
||||
fps = data.get("fps")
|
||||
if data.get("resolutionWidth"):
|
||||
resolution_x = data.get("resolutionWidth")
|
||||
if data.get("resolutionHeight"):
|
||||
resolution_y = data.get("resolutionHeight")
|
||||
|
||||
scene.frame_start = frameStart
|
||||
scene.frame_end = frameEnd
|
||||
scene.render.fps = fps
|
||||
scene.render.resolution_x = resolution_x
|
||||
scene.render.resolution_y = resolution_y
|
||||
|
||||
|
||||
def on_new(arg1, arg2):
|
||||
set_start_end_frames()
|
||||
|
||||
|
||||
def on_open(arg1, arg2):
|
||||
set_start_end_frames()
|
||||
# Utility functions
|
||||
"maintained_selection",
|
||||
"lsattr",
|
||||
"lsattrs",
|
||||
"read",
|
||||
"get_selection",
|
||||
# "unique_name",
|
||||
]
|
||||
|
|
|
|||
BIN  openpype/hosts/blender/api/icons/pyblish-32x32.png  (new file, binary not shown; 632 B)

@@ -1,9 +1,16 @@
import os
|
||||
import traceback
|
||||
import importlib
|
||||
import contextlib
|
||||
from typing import Dict, List, Union
|
||||
|
||||
import bpy
|
||||
import addon_utils
|
||||
from openpype.api import Logger
|
||||
|
||||
from . import pipeline
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
||||
def load_scripts(paths):
|
||||
|
|
@ -125,3 +132,155 @@ def append_user_scripts():
|
|||
except Exception:
|
||||
print("Couldn't load user scripts \"{}\"".format(user_scripts))
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
def imprint(node: bpy.types.bpy_struct_meta_idprop, data: Dict):
|
||||
r"""Write `data` to `node` as userDefined attributes
|
||||
|
||||
Arguments:
|
||||
node: Long name of node
|
||||
data: Dictionary of key/value pairs
|
||||
|
||||
Example:
|
||||
>>> import bpy
|
||||
>>> def compute():
|
||||
... return 6
|
||||
...
|
||||
>>> bpy.ops.mesh.primitive_cube_add()
|
||||
>>> cube = bpy.context.view_layer.objects.active
|
||||
>>> imprint(cube, {
|
||||
... "regularString": "myFamily",
|
||||
... "computedValue": lambda: compute()
|
||||
... })
|
||||
...
|
||||
>>> cube['avalon']['computedValue']
|
||||
6
|
||||
"""
|
||||
|
||||
imprint_data = dict()
|
||||
|
||||
for key, value in data.items():
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
if callable(value):
|
||||
# Support values evaluated at imprint
|
||||
value = value()
|
||||
|
||||
if not isinstance(value, (int, float, bool, str, list)):
|
||||
raise TypeError(f"Unsupported type: {type(value)}")
|
||||
|
||||
imprint_data[key] = value
|
||||
|
||||
pipeline.metadata_update(node, imprint_data)
|
||||
|
||||
|
||||
def lsattr(attr: str,
|
||||
value: Union[str, int, bool, List, Dict, None] = None) -> List:
|
||||
r"""Return nodes matching `attr` and `value`
|
||||
|
||||
Arguments:
|
||||
attr: Name of Blender property
|
||||
value: Value of attribute. If none
|
||||
is provided, return all nodes with this attribute.
|
||||
|
||||
Example:
|
||||
>>> lsattr("id", "myId")
|
||||
... [bpy.data.objects["myNode"]
|
||||
>>> lsattr("id")
|
||||
... [bpy.data.objects["myNode"], bpy.data.objects["myOtherNode"]]
|
||||
|
||||
Returns:
|
||||
list
|
||||
"""
|
||||
|
||||
return lsattrs({attr: value})
|
||||
|
||||
|
||||
def lsattrs(attrs: Dict) -> List:
|
||||
r"""Return nodes with the given attribute(s).
|
||||
|
||||
Arguments:
|
||||
attrs: Name and value pairs of expected matches
|
||||
|
||||
Example:
|
||||
>>> lsattrs({"age": 5}) # Return nodes with an `age` of 5
|
||||
# Return nodes with both `age` and `color` of 5 and blue
|
||||
>>> lsattrs({"age": 5, "color": "blue"})
|
||||
|
||||
Returns a list.
|
||||
|
||||
"""
|
||||
|
||||
# For now return all objects, not filtered by scene/collection/view_layer.
|
||||
matches = set()
|
||||
for coll in dir(bpy.data):
|
||||
if not isinstance(
|
||||
getattr(bpy.data, coll),
|
||||
bpy.types.bpy_prop_collection,
|
||||
):
|
||||
continue
|
||||
for node in getattr(bpy.data, coll):
|
||||
for attr, value in attrs.items():
|
||||
avalon_prop = node.get(pipeline.AVALON_PROPERTY)
|
||||
if not avalon_prop:
|
||||
continue
|
||||
if (avalon_prop.get(attr)
|
||||
and (value is None or avalon_prop.get(attr) == value)):
|
||||
matches.add(node)
|
||||
return list(matches)
|
||||
|
||||
|
||||
def read(node: bpy.types.bpy_struct_meta_idprop):
|
||||
"""Return user-defined attributes from `node`"""
|
||||
|
||||
data = dict(node.get(pipeline.AVALON_PROPERTY))
|
||||
|
||||
# Ignore hidden/internal data
|
||||
data = {
|
||||
key: value
|
||||
for key, value in data.items() if not key.startswith("_")
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_selection() -> List[bpy.types.Object]:
|
||||
"""Return the selected objects from the current scene."""
|
||||
return [obj for obj in bpy.context.scene.objects if obj.select_get()]
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_selection():
|
||||
r"""Maintain selection during context
|
||||
|
||||
Example:
|
||||
>>> with maintained_selection():
|
||||
... # Modify selection
|
||||
... bpy.ops.object.select_all(action='DESELECT')
|
||||
>>> # Selection restored
|
||||
"""
|
||||
|
||||
previous_selection = get_selection()
|
||||
previous_active = bpy.context.view_layer.objects.active
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
# Clear the selection
|
||||
for node in get_selection():
|
||||
node.select_set(state=False)
|
||||
if previous_selection:
|
||||
for node in previous_selection:
|
||||
try:
|
||||
node.select_set(state=True)
|
||||
except ReferenceError:
|
||||
# This could happen if a selected node was deleted during
|
||||
# the context.
|
||||
log.exception("Failed to reselect")
|
||||
continue
|
||||
try:
|
||||
bpy.context.view_layer.objects.active = previous_active
|
||||
except ReferenceError:
|
||||
# This could happen if the active node was deleted during the
|
||||
# context.
|
||||
log.exception("Failed to set active object.")
|
||||
|
|
|
|||
423
openpype/hosts/blender/api/ops.py
Normal file
423
openpype/hosts/blender/api/ops.py
Normal file
|
|
@ -0,0 +1,423 @@
|
|||
"""Blender operators and menus for use with Avalon."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import platform
|
||||
import time
|
||||
import traceback
|
||||
import collections
|
||||
from pathlib import Path
|
||||
from types import ModuleType
|
||||
from typing import Dict, List, Optional, Union
|
||||
|
||||
from Qt import QtWidgets, QtCore
|
||||
|
||||
import bpy
|
||||
import bpy.utils.previews
|
||||
|
||||
import avalon.api
|
||||
from openpype.tools.utils import host_tools
|
||||
from openpype import style
|
||||
|
||||
from .workio import OpenFileCacher
|
||||
|
||||
PREVIEW_COLLECTIONS: Dict = dict()
|
||||
|
||||
# This seems like a good value to keep the Qt app responsive and doesn't slow
|
||||
# down Blender. At least on macOS the interface of Blender gets very laggy if
|
||||
# you make it smaller.
|
||||
TIMER_INTERVAL: float = 0.01
|
||||
|
||||
|
||||
class BlenderApplication(QtWidgets.QApplication):
|
||||
_instance = None
|
||||
blender_windows = {}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(BlenderApplication, self).__init__(*args, **kwargs)
|
||||
self.setQuitOnLastWindowClosed(False)
|
||||
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
self.lastWindowClosed.connect(self.__class__.reset)
|
||||
|
||||
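# Accessor that lazily creates a single shared QApplication instance.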
@classmethod
|
||||
def get_app(cls):
|
||||
if cls._instance is None:
|
||||
cls._instance = cls(sys.argv)
|
||||
return cls._instance
|
||||
|
||||
@classmethod
|
||||
def reset(cls):
|
||||
cls._instance = None
|
||||
|
||||
@classmethod
|
||||
def store_window(cls, identifier, window):
|
||||
current_window = cls.get_window(identifier)
|
||||
cls.blender_windows[identifier] = window
|
||||
if current_window:
|
||||
current_window.close()
|
||||
# current_window.deleteLater()
|
||||
|
||||
@classmethod
|
||||
def get_window(cls, identifier):
|
||||
return cls.blender_windows.get(identifier)
|
||||
|
||||
|
||||
class MainThreadItem:
|
||||
"""Structure to store information about callback in main thread.
|
||||
|
||||
Item should be used to execute a callback in the main thread, which may be needed
|
||||
for execution of Qt objects.
|
||||
|
||||
Item stores a callback (callable), arguments and keyword arguments
|
||||
for the callback. Item holds information about its processing.
|
||||
"""
|
||||
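# Sentinel to distinguish "not set yet" from a real None result or exception.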
not_set = object()
|
||||
sleep_time = 0.1
|
||||
|
||||
def __init__(self, callback, *args, **kwargs):
|
||||
self.done = False
|
||||
self.exception = self.not_set
|
||||
self.result = self.not_set
|
||||
self.callback = callback
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
def execute(self):
|
||||
"""Execute callback and store it's result.
|
||||
|
||||
Method must be called from the main thread. Item is marked as `done`
|
||||
when callback execution finishes. Stores the output of the callback, or exception
|
||||
information when the callback raises one.
|
||||
"""
|
||||
print("Executing process in main thread")
|
||||
if self.done:
|
||||
print("- item is already processed")
|
||||
return
|
||||
|
||||
callback = self.callback
|
||||
args = self.args
|
||||
kwargs = self.kwargs
|
||||
print("Running callback: {}".format(str(callback)))
|
||||
try:
|
||||
result = callback(*args, **kwargs)
|
||||
self.result = result
|
||||
|
||||
except Exception:
|
||||
self.exception = sys.exc_info()
|
||||
|
||||
finally:
|
||||
print("Done")
|
||||
self.done = True
|
||||
|
||||
def wait(self):
|
||||
"""Wait for result from main thread.
|
||||
|
||||
This method blocks the current thread until the callback is executed.
|
||||
|
||||
Returns:
|
||||
object: Output of callback. May be any type or object.
|
||||
|
||||
Raises:
|
||||
Exception: Reraise any exception that happened during callback
|
||||
execution.
|
||||
"""
|
||||
while not self.done:
|
||||
print(self.done)
|
||||
time.sleep(self.sleep_time)
|
||||
|
||||
if self.exception is self.not_set:
|
||||
return self.result
|
||||
raise self.exception
|
||||
|
||||
|
||||
class GlobalClass:
|
||||
app = None
|
||||
main_thread_callbacks = collections.deque()
|
||||
is_windows = platform.system().lower() == "windows"
|
||||
|
||||
|
||||
def execute_in_main_thread(main_thread_item):
|
||||
print("execute_in_main_thread")
|
||||
GlobalClass.main_thread_callbacks.append(main_thread_item)
|
||||
|
||||
|
||||
def _process_app_events() -> Optional[float]:
|
||||
"""Process the events of the Qt app if the window is still visible.
|
||||
|
||||
If the app has any top level windows and at least one of them is visible
|
||||
return the time after which this function should be run again. Else return
|
||||
None, so the function is not run again and will be unregistered.
|
||||
"""
|
||||
while GlobalClass.main_thread_callbacks:
|
||||
main_thread_item = GlobalClass.main_thread_callbacks.popleft()
|
||||
main_thread_item.execute()
|
||||
if main_thread_item.exception is not MainThreadItem.not_set:
|
||||
_clc, val, tb = main_thread_item.exception
|
||||
msg = str(val)
|
||||
detail = "\n".join(traceback.format_exception(_clc, val, tb))
|
||||
dialog = QtWidgets.QMessageBox(
|
||||
QtWidgets.QMessageBox.Warning,
|
||||
"Error",
|
||||
msg)
|
||||
dialog.setMinimumWidth(500)
|
||||
dialog.setDetailedText(detail)
|
||||
dialog.exec_()
|
||||
|
||||
if not GlobalClass.is_windows:
|
||||
if OpenFileCacher.opening_file:
|
||||
return TIMER_INTERVAL
|
||||
|
||||
app = GlobalClass.app
|
||||
if app._instance:
|
||||
app.processEvents()
|
||||
return TIMER_INTERVAL
|
||||
return TIMER_INTERVAL
|
||||
|
||||
|
||||
class LaunchQtApp(bpy.types.Operator):
|
||||
"""A Base class for opertors to launch a Qt app."""
|
||||
|
||||
_app: QtWidgets.QApplication
|
||||
_window = Union[QtWidgets.QDialog, ModuleType]
|
||||
_tool_name: str = None
|
||||
_init_args: Optional[List] = list()
|
||||
_init_kwargs: Optional[Dict] = dict()
|
||||
bl_idname: str = None
|
||||
|
||||
def __init__(self):
|
||||
if self.bl_idname is None:
|
||||
raise NotImplementedError("Attribute `bl_idname` must be set!")
|
||||
print(f"Initialising {self.bl_idname}...")
|
||||
self._app = BlenderApplication.get_app()
|
||||
GlobalClass.app = self._app
|
||||
|
||||
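# Process Qt events from a persistent Blender timer so the Qt windows stay responsive.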
bpy.app.timers.register(
|
||||
_process_app_events,
|
||||
persistent=True
|
||||
)
|
||||
|
||||
def execute(self, context):
|
||||
"""Execute the operator.
|
||||
|
||||
The child class must implement `execute()` where it only has to set
|
||||
`self._window` to the desired Qt window and then simply run
|
||||
`return super().execute(context)`.
|
||||
`self._window` is expected to have a `show` method.
|
||||
If the `show` method requires arguments, you can set `self._show_args`
|
||||
and `self._show_kwargs`. `args` should be a list, `kwargs` a
|
||||
dictionary.
|
||||
"""
|
||||
|
||||
if self._tool_name is None:
|
||||
if self._window is None:
|
||||
raise AttributeError("`self._window` is not set.")
|
||||
|
||||
else:
|
||||
window = self._app.get_window(self.bl_idname)
|
||||
if window is None:
|
||||
window = host_tools.get_tool_by_name(self._tool_name)
|
||||
self._app.store_window(self.bl_idname, window)
|
||||
self._window = window
|
||||
|
||||
if not isinstance(
|
||||
self._window,
|
||||
(QtWidgets.QMainWindow, QtWidgets.QDialog, ModuleType)
|
||||
):
|
||||
raise AttributeError(
|
||||
"`window` should be a `QDialog or module`. Got: {}".format(
|
||||
str(type(window))
|
||||
)
|
||||
)
|
||||
|
||||
self.before_window_show()
|
||||
|
||||
if isinstance(self._window, ModuleType):
|
||||
self._window.show()
|
||||
window = None
|
||||
if hasattr(self._window, "window"):
|
||||
window = self._window.window
|
||||
elif hasattr(self._window, "_window"):
|
||||
window = self._window._window
|
||||
|
||||
if window:
|
||||
self._app.store_window(self.bl_idname, window)
|
||||
|
||||
else:
|
||||
origin_flags = self._window.windowFlags()
|
||||
on_top_flags = origin_flags | QtCore.Qt.WindowStaysOnTopHint
|
||||
self._window.setWindowFlags(on_top_flags)
|
||||
self._window.show()
|
||||
|
||||
if on_top_flags != origin_flags:
|
||||
self._window.setWindowFlags(origin_flags)
|
||||
self._window.show()
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
def before_window_show(self):
|
||||
return
|
||||
|
||||
|
||||
class LaunchCreator(LaunchQtApp):
|
||||
"""Launch Avalon Creator."""
|
||||
|
||||
bl_idname = "wm.avalon_creator"
|
||||
bl_label = "Create..."
|
||||
_tool_name = "creator"
|
||||
|
||||
def before_window_show(self):
|
||||
self._window.refresh()
|
||||
|
||||
|
||||
class LaunchLoader(LaunchQtApp):
|
||||
"""Launch Avalon Loader."""
|
||||
|
||||
bl_idname = "wm.avalon_loader"
|
||||
bl_label = "Load..."
|
||||
_tool_name = "loader"
|
||||
|
||||
def before_window_show(self):
|
||||
self._window.set_context(
|
||||
{"asset": avalon.api.Session["AVALON_ASSET"]},
|
||||
refresh=True
|
||||
)
|
||||
|
||||
|
||||
class LaunchPublisher(LaunchQtApp):
|
||||
"""Launch Avalon Publisher."""
|
||||
|
||||
bl_idname = "wm.avalon_publisher"
|
||||
bl_label = "Publish..."
|
||||
|
||||
def execute(self, context):
|
||||
host_tools.show_publish()
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class LaunchManager(LaunchQtApp):
|
||||
"""Launch Avalon Manager."""
|
||||
|
||||
bl_idname = "wm.avalon_manager"
|
||||
bl_label = "Manage..."
|
||||
_tool_name = "sceneinventory"
|
||||
|
||||
def before_window_show(self):
|
||||
self._window.refresh()
|
||||
|
||||
|
||||
class LaunchLibrary(LaunchQtApp):
|
||||
"""Launch Library Loader."""
|
||||
|
||||
bl_idname = "wm.library_loader"
|
||||
bl_label = "Library..."
|
||||
_tool_name = "libraryloader"
|
||||
|
||||
def before_window_show(self):
|
||||
self._window.refresh()
|
||||
|
||||
|
||||
class LaunchWorkFiles(LaunchQtApp):
|
||||
"""Launch Avalon Work Files."""
|
||||
|
||||
bl_idname = "wm.avalon_workfiles"
|
||||
bl_label = "Work Files..."
|
||||
_tool_name = "workfiles"
|
||||
|
||||
def execute(self, context):
|
||||
result = super().execute(context)
|
||||
self._window.set_context({
|
||||
"asset": avalon.api.Session["AVALON_ASSET"],
|
||||
"silo": avalon.api.Session["AVALON_SILO"],
|
||||
"task": avalon.api.Session["AVALON_TASK"]
|
||||
})
|
||||
return result
|
||||
|
||||
def before_window_show(self):
|
||||
self._window.root = str(Path(
|
||||
os.environ.get("AVALON_WORKDIR", ""),
|
||||
os.environ.get("AVALON_SCENEDIR", ""),
|
||||
))
|
||||
self._window.refresh()
|
||||
|
||||
|
||||
class TOPBAR_MT_avalon(bpy.types.Menu):
|
||||
"""Avalon menu."""
|
||||
|
||||
bl_idname = "TOPBAR_MT_avalon"
|
||||
bl_label = os.environ.get("AVALON_LABEL")
|
||||
|
||||
def draw(self, context):
|
||||
"""Draw the menu in the UI."""
|
||||
|
||||
layout = self.layout
|
||||
|
||||
pcoll = PREVIEW_COLLECTIONS.get("avalon")
|
||||
if pcoll:
|
||||
pyblish_menu_icon = pcoll["pyblish_menu_icon"]
|
||||
pyblish_menu_icon_id = pyblish_menu_icon.icon_id
|
||||
else:
|
||||
pyblish_menu_icon_id = 0
|
||||
|
||||
asset = avalon.api.Session['AVALON_ASSET']
|
||||
task = avalon.api.Session['AVALON_TASK']
|
||||
context_label = f"{asset}, {task}"
|
||||
context_label_item = layout.row()
|
||||
context_label_item.operator(
|
||||
LaunchWorkFiles.bl_idname, text=context_label
|
||||
)
|
||||
context_label_item.enabled = False
|
||||
layout.separator()
|
||||
layout.operator(LaunchCreator.bl_idname, text="Create...")
|
||||
layout.operator(LaunchLoader.bl_idname, text="Load...")
|
||||
layout.operator(
|
||||
LaunchPublisher.bl_idname,
|
||||
text="Publish...",
|
||||
icon_value=pyblish_menu_icon_id,
|
||||
)
|
||||
layout.operator(LaunchManager.bl_idname, text="Manage...")
|
||||
layout.operator(LaunchLibrary.bl_idname, text="Library...")
|
||||
layout.separator()
|
||||
layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...")
|
||||
# TODO (jasper): maybe add 'Reload Pipeline', 'Reset Frame Range' and
|
||||
# 'Reset Resolution'?
|
||||
|
||||
|
||||
def draw_avalon_menu(self, context):
|
||||
"""Draw the Avalon menu in the top bar."""
|
||||
|
||||
self.layout.menu(TOPBAR_MT_avalon.bl_idname)
|
||||
|
||||
|
||||
classes = [
|
||||
LaunchCreator,
|
||||
LaunchLoader,
|
||||
LaunchPublisher,
|
||||
LaunchManager,
|
||||
LaunchLibrary,
|
||||
LaunchWorkFiles,
|
||||
TOPBAR_MT_avalon,
|
||||
]
|
||||
|
||||
|
||||
def register():
|
||||
"Register the operators and menu."
|
||||
|
||||
pcoll = bpy.utils.previews.new()
|
||||
pyblish_icon_file = Path(__file__).parent / "icons" / "pyblish-32x32.png"
|
||||
pcoll.load("pyblish_menu_icon", str(pyblish_icon_file.absolute()), 'IMAGE')
|
||||
PREVIEW_COLLECTIONS["avalon"] = pcoll
|
||||
|
||||
for cls in classes:
|
||||
bpy.utils.register_class(cls)
|
||||
bpy.types.TOPBAR_MT_editor_menus.append(draw_avalon_menu)
|
||||
|
||||
|
||||
def unregister():
|
||||
"""Unregister the operators and menu."""
|
||||
|
||||
pcoll = PREVIEW_COLLECTIONS.pop("avalon")
|
||||
bpy.utils.previews.remove(pcoll)
|
||||
bpy.types.TOPBAR_MT_editor_menus.remove(draw_avalon_menu)
|
||||
for cls in reversed(classes):
|
||||
bpy.utils.unregister_class(cls)
|
||||
430
openpype/hosts/blender/api/pipeline.py
Normal file
430
openpype/hosts/blender/api/pipeline.py
Normal file
|
|
@ -0,0 +1,430 @@
|
|||
import os
|
||||
import sys
|
||||
import importlib
|
||||
import traceback
|
||||
from typing import Callable, Dict, Iterator, List, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
from . import lib
|
||||
from . import ops
|
||||
|
||||
import pyblish.api
|
||||
import avalon.api
|
||||
from avalon import io, schema
|
||||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype.api import Logger
|
||||
from openpype.lib import (
|
||||
register_event_callback,
|
||||
emit_event
|
||||
)
|
||||
import openpype.hosts.blender
|
||||
|
||||
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__))
|
||||
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
|
||||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
||||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
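# Keep a reference to the default excepthook so uninstall() can restore it.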
ORIGINAL_EXCEPTHOOK = sys.excepthook
|
||||
|
||||
AVALON_INSTANCES = "AVALON_INSTANCES"
|
||||
AVALON_CONTAINERS = "AVALON_CONTAINERS"
|
||||
AVALON_PROPERTY = 'avalon'
|
||||
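# True when Blender runs without a UI (e.g. started with --background).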
IS_HEADLESS = bpy.app.background
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
||||
def pype_excepthook_handler(*args):
|
||||
traceback.print_exception(*args)
|
||||
|
||||
|
||||
def install():
|
||||
"""Install Blender configuration for Avalon."""
|
||||
sys.excepthook = pype_excepthook_handler
|
||||
|
||||
pyblish.api.register_host("blender")
|
||||
pyblish.api.register_plugin_path(str(PUBLISH_PATH))
|
||||
|
||||
avalon.api.register_plugin_path(avalon.api.Loader, str(LOAD_PATH))
|
||||
avalon.api.register_plugin_path(LegacyCreator, str(CREATE_PATH))
|
||||
|
||||
lib.append_user_scripts()
|
||||
|
||||
register_event_callback("new", on_new)
|
||||
register_event_callback("open", on_open)
|
||||
|
||||
_register_callbacks()
|
||||
_register_events()
|
||||
|
||||
if not IS_HEADLESS:
|
||||
ops.register()
|
||||
|
||||
|
||||
def uninstall():
|
||||
"""Uninstall Blender configuration for Avalon."""
|
||||
sys.excepthook = ORIGINAL_EXCEPTHOOK
|
||||
|
||||
pyblish.api.deregister_host("blender")
|
||||
pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))
|
||||
|
||||
avalon.api.deregister_plugin_path(avalon.api.Loader, str(LOAD_PATH))
|
||||
avalon.api.deregister_plugin_path(LegacyCreator, str(CREATE_PATH))
|
||||
|
||||
if not IS_HEADLESS:
|
||||
ops.unregister()
|
||||
|
||||
|
||||
def set_start_end_frames():
|
||||
asset_name = io.Session["AVALON_ASSET"]
|
||||
asset_doc = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
scene = bpy.context.scene
|
||||
|
||||
# Default scene settings
|
||||
frameStart = scene.frame_start
|
||||
frameEnd = scene.frame_end
|
||||
fps = scene.render.fps
|
||||
resolution_x = scene.render.resolution_x
|
||||
resolution_y = scene.render.resolution_y
|
||||
|
||||
# Check if settings are set
|
||||
data = asset_doc.get("data")
|
||||
|
||||
if not data:
|
||||
return
|
||||
|
||||
if data.get("frameStart"):
|
||||
frameStart = data.get("frameStart")
|
||||
if data.get("frameEnd"):
|
||||
frameEnd = data.get("frameEnd")
|
||||
if data.get("fps"):
|
||||
fps = data.get("fps")
|
||||
if data.get("resolutionWidth"):
|
||||
resolution_x = data.get("resolutionWidth")
|
||||
if data.get("resolutionHeight"):
|
||||
resolution_y = data.get("resolutionHeight")
|
||||
|
||||
scene.frame_start = frameStart
|
||||
scene.frame_end = frameEnd
|
||||
scene.render.fps = fps
|
||||
scene.render.resolution_x = resolution_x
|
||||
scene.render.resolution_y = resolution_y
|
||||
|
||||
|
||||
def on_new():
|
||||
set_start_end_frames()
|
||||
|
||||
|
||||
def on_open():
|
||||
set_start_end_frames()
|
||||
|
||||
|
||||
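# Persistent handlers survive loading another file; they forward Blender's save/load events to OpenPype callbacks.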
@bpy.app.handlers.persistent
|
||||
def _on_save_pre(*args):
|
||||
emit_event("before.save")
|
||||
|
||||
|
||||
@bpy.app.handlers.persistent
|
||||
def _on_save_post(*args):
|
||||
emit_event("save")
|
||||
|
||||
|
||||
@bpy.app.handlers.persistent
|
||||
def _on_load_post(*args):
|
||||
# Detect new file or opening an existing file
|
||||
if bpy.data.filepath:
|
||||
# Likely this was an open operation since it has a filepath
|
||||
emit_event("open")
|
||||
else:
|
||||
emit_event("new")
|
||||
|
||||
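# The file has finished loading; clear the "opening file" flag used by the Qt event timer.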
ops.OpenFileCacher.post_load()
|
||||
|
||||
|
||||
def _register_callbacks():
|
||||
"""Register callbacks for certain events."""
|
||||
def _remove_handler(handlers: List, callback: Callable):
|
||||
"""Remove the callback from the given handler list."""
|
||||
|
||||
try:
|
||||
handlers.remove(callback)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# TODO (jasper): implement on_init callback?
|
||||
|
||||
# Be sure to remove existing ones first.
|
||||
_remove_handler(bpy.app.handlers.save_pre, _on_save_pre)
|
||||
_remove_handler(bpy.app.handlers.save_post, _on_save_post)
|
||||
_remove_handler(bpy.app.handlers.load_post, _on_load_post)
|
||||
|
||||
bpy.app.handlers.save_pre.append(_on_save_pre)
|
||||
bpy.app.handlers.save_post.append(_on_save_post)
|
||||
bpy.app.handlers.load_post.append(_on_load_post)
|
||||
|
||||
log.info("Installed event handler _on_save_pre...")
|
||||
log.info("Installed event handler _on_save_post...")
|
||||
log.info("Installed event handler _on_load_post...")
|
||||
|
||||
|
||||
def _on_task_changed():
|
||||
"""Callback for when the task in the context is changed."""
|
||||
|
||||
# TODO (jasper): Blender has no concept of projects or workspace.
|
||||
# It would be nice to override 'bpy.ops.wm.open_mainfile' so it takes the
|
||||
# workdir as starting directory. But I don't know if that is possible.
|
||||
# Another option would be to create a custom 'File Selector' and add the
|
||||
# `directory` attribute, so it opens in that directory (does it?).
|
||||
# https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector
|
||||
# https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
|
||||
workdir = avalon.api.Session["AVALON_WORKDIR"]
|
||||
log.debug("New working directory: %s", workdir)
|
||||
|
||||
|
||||
def _register_events():
|
||||
"""Install callbacks for specific events."""
|
||||
|
||||
register_event_callback("taskChanged", _on_task_changed)
|
||||
log.info("Installed event callback for 'taskChanged'...")
|
||||
|
||||
|
||||
def reload_pipeline(*args):
|
||||
"""Attempt to reload pipeline at run-time.
|
||||
|
||||
Warning:
|
||||
This is primarily for development and debugging purposes and not well
|
||||
tested.
|
||||
|
||||
"""
|
||||
|
||||
avalon.api.uninstall()
|
||||
|
||||
for module in (
|
||||
"avalon.io",
|
||||
"avalon.lib",
|
||||
"avalon.pipeline",
|
||||
"avalon.api",
|
||||
):
|
||||
module = importlib.import_module(module)
|
||||
importlib.reload(module)
|
||||
|
||||
|
||||
def _discover_gui() -> Optional[Callable]:
|
||||
"""Return the most desirable of the currently registered GUIs"""
|
||||
|
||||
# Prefer last registered
|
||||
guis = reversed(pyblish.api.registered_guis())
|
||||
|
||||
for gui in guis:
|
||||
try:
|
||||
gui = __import__(gui).show
|
||||
except (ImportError, AttributeError):
|
||||
continue
|
||||
else:
|
||||
return gui
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def add_to_avalon_container(container: bpy.types.Collection):
|
||||
"""Add the container to the Avalon container."""
|
||||
|
||||
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
|
||||
if not avalon_container:
|
||||
avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
|
||||
|
||||
# Link the container to the scene so it's easily visible to the artist
|
||||
# and can be managed easily. Otherwise it's only found in "Blender
|
||||
# File" view and it will be removed by Blenders garbage collection,
|
||||
# unless you set a 'fake user'.
|
||||
bpy.context.scene.collection.children.link(avalon_container)
|
||||
|
||||
avalon_container.children.link(container)
|
||||
|
||||
# Disable Avalon containers for the view layers.
|
||||
for view_layer in bpy.context.scene.view_layers:
|
||||
for child in view_layer.layer_collection.children:
|
||||
if child.collection == avalon_container:
|
||||
child.exclude = True
|
||||
|
||||
|
||||
def metadata_update(node: bpy.types.bpy_struct_meta_idprop, data: Dict):
|
||||
"""Imprint the node with metadata.
|
||||
|
||||
Existing metadata will be updated.
|
||||
"""
|
||||
|
||||
if not node.get(AVALON_PROPERTY):
|
||||
node[AVALON_PROPERTY] = dict()
|
||||
for key, value in data.items():
|
||||
if value is None:
|
||||
continue
|
||||
node[AVALON_PROPERTY][key] = value
|
||||
|
||||
|
||||
def containerise(name: str,
|
||||
namespace: str,
|
||||
nodes: List,
|
||||
context: Dict,
|
||||
loader: Optional[str] = None,
|
||||
suffix: Optional[str] = "CON") -> bpy.types.Collection:
|
||||
"""Bundle `nodes` into an assembly and imprint it with metadata
|
||||
|
||||
Containerisation enables tracking of version, author and origin
|
||||
for loaded assets.
|
||||
|
||||
Arguments:
|
||||
name: Name of resulting assembly
|
||||
namespace: Namespace under which to host container
|
||||
nodes: Long names of nodes to containerise
|
||||
context: Asset information
|
||||
loader: Name of loader used to produce this container.
|
||||
suffix: Suffix of container, defaults to `_CON`.
|
||||
|
||||
Returns:
|
||||
The container assembly
|
||||
|
||||
"""
|
||||
|
||||
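# Build the container name: <asset>_<name>, optionally prefixed with the namespace and suffixed (default "CON").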
node_name = f"{context['asset']['name']}_{name}"
|
||||
if namespace:
|
||||
node_name = f"{namespace}:{node_name}"
|
||||
if suffix:
|
||||
node_name = f"{node_name}_{suffix}"
|
||||
container = bpy.data.collections.new(name=node_name)
|
||||
# Link the children nodes
|
||||
for obj in nodes:
|
||||
container.objects.link(obj)
|
||||
|
||||
data = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": name,
|
||||
"namespace": namespace or '',
|
||||
"loader": str(loader),
|
||||
"representation": str(context["representation"]["_id"]),
|
||||
}
|
||||
|
||||
metadata_update(container, data)
|
||||
add_to_avalon_container(container)
|
||||
|
||||
return container
|
||||
|
||||
|
||||
def containerise_existing(
|
||||
container: bpy.types.Collection,
|
||||
name: str,
|
||||
namespace: str,
|
||||
context: Dict,
|
||||
loader: Optional[str] = None,
|
||||
suffix: Optional[str] = "CON") -> bpy.types.Collection:
|
||||
"""Imprint or update container with metadata.
|
||||
|
||||
Arguments:
|
||||
name: Name of resulting assembly
|
||||
namespace: Namespace under which to host container
|
||||
context: Asset information
|
||||
loader: Name of loader used to produce this container.
|
||||
suffix: Suffix of container, defaults to `_CON`.
|
||||
|
||||
Returns:
|
||||
The container assembly
|
||||
"""
|
||||
|
||||
node_name = container.name
|
||||
if suffix:
|
||||
node_name = f"{node_name}_{suffix}"
|
||||
container.name = node_name
|
||||
data = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": name,
|
||||
"namespace": namespace or '',
|
||||
"loader": str(loader),
|
||||
"representation": str(context["representation"]["_id"]),
|
||||
}
|
||||
|
||||
metadata_update(container, data)
|
||||
add_to_avalon_container(container)
|
||||
|
||||
return container
|
||||
|
||||
|
||||
def parse_container(container: bpy.types.Collection,
|
||||
validate: bool = True) -> Dict:
|
||||
"""Return the container node's full container data.
|
||||
|
||||
Args:
|
||||
container: A container node name.
|
||||
validate: turn the validation for the container on or off
|
||||
|
||||
Returns:
|
||||
The container schema data for this container node.
|
||||
|
||||
"""
|
||||
|
||||
data = lib.read(container)
|
||||
|
||||
# Append transient data
|
||||
data["objectName"] = container.name
|
||||
|
||||
if validate:
|
||||
schema.validate(data)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def ls() -> Iterator:
|
||||
"""List containers from active Blender scene.
|
||||
|
||||
This is the host-equivalent of api.ls(), but instead of listing assets on
|
||||
disk, it lists assets already loaded in Blender; once loaded they are
|
||||
called containers.
|
||||
"""
|
||||
|
||||
for container in lib.lsattr("id", AVALON_CONTAINER_ID):
|
||||
yield parse_container(container)
|
||||
|
||||
|
||||
def update_hierarchy(containers):
|
||||
"""Hierarchical container support
|
||||
|
||||
This is the function to support Scene Inventory to draw hierarchical
|
||||
view for containers.
|
||||
|
||||
We need both parent and children to visualize the graph.
|
||||
|
||||
"""
|
||||
|
||||
all_containers = set(ls()) # lookup set
|
||||
|
||||
for container in containers:
|
||||
# Find parent
|
||||
# FIXME (jasperge): re-evaluate this. How would it be possible
|
||||
# to 'nest' assets? Collections can have several parents, for
|
||||
# now assume it has only 1 parent
|
||||
parent = [
|
||||
coll for coll in bpy.data.collections if container in coll.children
|
||||
]
|
||||
for node in parent:
|
||||
if node in all_containers:
|
||||
container["parent"] = node
|
||||
break
|
||||
|
||||
log.debug("Container: %s", container)
|
||||
|
||||
yield container
|
||||
|
||||
|
||||
def publish():
|
||||
"""Shorthand to publish from within host."""
|
||||
|
||||
return pyblish.util.publish()
|
||||
|
|
@ -5,10 +5,17 @@ from typing import Dict, List, Optional
|
|||
|
||||
import bpy
|
||||
|
||||
from avalon import api, blender
|
||||
from avalon.blender import ops
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from openpype.api import PypeCreatorMixin
|
||||
import avalon.api
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from .pipeline import AVALON_CONTAINERS
|
||||
from .ops import (
|
||||
MainThreadItem,
|
||||
execute_in_main_thread
|
||||
)
|
||||
from .lib import (
|
||||
imprint,
|
||||
get_selection
|
||||
)
|
||||
|
||||
VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]
|
||||
|
||||
|
|
@ -42,10 +49,13 @@ def get_unique_number(
|
|||
return f"{count:0>2}"
|
||||
|
||||
|
||||
def prepare_data(data, container_name):
|
||||
def prepare_data(data, container_name=None):
|
||||
name = data.name
|
||||
local_data = data.make_local()
|
||||
local_data.name = f"{container_name}:{name}"
|
||||
if container_name:
|
||||
local_data.name = f"{container_name}:{name}"
|
||||
else:
|
||||
local_data.name = f"{name}"
|
||||
return local_data
|
||||
|
||||
|
||||
|
|
@ -119,11 +129,29 @@ def deselect_all():
|
|||
bpy.context.view_layer.objects.active = active
|
||||
|
||||
|
||||
class Creator(PypeCreatorMixin, blender.Creator):
|
||||
pass
|
||||
class Creator(LegacyCreator):
|
||||
"""Base class for Creator plug-ins."""
|
||||
defaults = ['Main']
|
||||
|
||||
def process(self):
|
||||
collection = bpy.data.collections.new(name=self.data["subset"])
|
||||
bpy.context.scene.collection.children.link(collection)
|
||||
imprint(collection, self.data)
|
||||
|
||||
if (self.options or {}).get("useSelection"):
|
||||
for obj in get_selection():
|
||||
collection.objects.link(obj)
|
||||
|
||||
return collection
|
||||
|
||||
|
||||
class AssetLoader(api.Loader):
|
||||
class Loader(avalon.api.Loader):
|
||||
"""Base class for Loader plug-ins."""
|
||||
|
||||
hosts = ["blender"]
|
||||
|
||||
|
||||
class AssetLoader(avalon.api.Loader):
|
||||
"""A basic AssetLoader for Blender
|
||||
|
||||
This will implement the basic logic for linking/appending assets
|
||||
|
|
@ -191,8 +219,8 @@ class AssetLoader(api.Loader):
|
|||
namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None) -> Optional[bpy.types.Collection]:
|
||||
""" Run the loader on Blender main thread"""
|
||||
mti = ops.MainThreadItem(self._load, context, name, namespace, options)
|
||||
ops.execute_in_main_thread(mti)
|
||||
mti = MainThreadItem(self._load, context, name, namespace, options)
|
||||
execute_in_main_thread(mti)
|
||||
|
||||
def _load(self,
|
||||
context: dict,
|
||||
|
|
@ -257,8 +285,8 @@ class AssetLoader(api.Loader):
|
|||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
""" Run the update on Blender main thread"""
|
||||
mti = ops.MainThreadItem(self.exec_update, container, representation)
|
||||
ops.execute_in_main_thread(mti)
|
||||
mti = MainThreadItem(self.exec_update, container, representation)
|
||||
execute_in_main_thread(mti)
|
||||
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Must be implemented by a sub-class"""
|
||||
|
|
@ -266,5 +294,5 @@ class AssetLoader(api.Loader):
|
|||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
""" Run the remove on Blender main thread"""
|
||||
mti = ops.MainThreadItem(self.exec_remove, container)
|
||||
ops.execute_in_main_thread(mti)
|
||||
mti = MainThreadItem(self.exec_remove, container)
|
||||
execute_in_main_thread(mti)
|
||||
|
|
|
|||
90
openpype/hosts/blender/api/workio.py
Normal file
90
openpype/hosts/blender/api/workio.py
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
"""Host API required for Work Files."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import bpy
|
||||
from avalon import api
|
||||
|
||||
|
||||
class OpenFileCacher:
|
||||
"""Store information about opening file.
|
||||
|
||||
When a file is opening, QApplication events should not be processed.
|
||||
"""
|
||||
opening_file = False
|
||||
|
||||
@classmethod
|
||||
def post_load(cls):
|
||||
cls.opening_file = False
|
||||
|
||||
@classmethod
|
||||
def set_opening(cls):
|
||||
cls.opening_file = True
|
||||
|
||||
|
||||
def open_file(filepath: str) -> Optional[str]:
|
||||
"""Open the scene file in Blender."""
|
||||
OpenFileCacher.set_opening()
|
||||
|
||||
preferences = bpy.context.preferences
|
||||
load_ui = preferences.filepaths.use_load_ui
|
||||
use_scripts = preferences.filepaths.use_scripts_auto_execute
|
||||
result = bpy.ops.wm.open_mainfile(
|
||||
filepath=filepath,
|
||||
load_ui=load_ui,
|
||||
use_scripts=use_scripts,
|
||||
)
|
||||
|
||||
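# Blender operators report success as the set {'FINISHED'}.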
if result == {'FINISHED'}:
|
||||
return filepath
|
||||
return None
|
||||
|
||||
|
||||
def save_file(filepath: str, copy: bool = False) -> Optional[str]:
|
||||
"""Save the open scene file."""
|
||||
|
||||
preferences = bpy.context.preferences
|
||||
compress = preferences.filepaths.use_file_compression
|
||||
relative_remap = preferences.filepaths.use_relative_paths
|
||||
result = bpy.ops.wm.save_as_mainfile(
|
||||
filepath=filepath,
|
||||
compress=compress,
|
||||
relative_remap=relative_remap,
|
||||
copy=copy,
|
||||
)
|
||||
|
||||
if result == {'FINISHED'}:
|
||||
return filepath
|
||||
return None
|
||||
|
||||
|
||||
def current_file() -> Optional[str]:
|
||||
"""Return the path of the open scene file."""
|
||||
|
||||
current_filepath = bpy.data.filepath
|
||||
if Path(current_filepath).is_file():
|
||||
return current_filepath
|
||||
return None
|
||||
|
||||
|
||||
def has_unsaved_changes() -> bool:
|
||||
"""Does the open scene file have unsaved changes?"""
|
||||
|
||||
return bpy.data.is_dirty
|
||||
|
||||
|
||||
def file_extensions() -> List[str]:
|
||||
"""Return the supported file extensions for Blender scene files."""
|
||||
|
||||
return api.HOST_WORKFILE_EXTENSIONS["blender"]
|
||||
|
||||
|
||||
def work_root(session: dict) -> str:
|
||||
"""Return the default root to browse for work files."""
|
||||
|
||||
work_dir = session["AVALON_WORKDIR"]
|
||||
scene_dir = session.get("AVALON_SCENEDIR")
|
||||
if scene_dir:
|
||||
return str(Path(work_dir, scene_dir))
|
||||
return work_dir
|
||||
4
openpype/hosts/blender/blender_addon/startup/init.py
Normal file
4
openpype/hosts/blender/blender_addon/startup/init.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
from avalon import pipeline
|
||||
from openpype.hosts.blender import api
|
||||
|
||||
pipeline.install(api)
|
||||
|
|
@ -4,7 +4,7 @@ import bpy
|
|||
|
||||
from avalon import api
|
||||
import openpype.hosts.blender.api.plugin
|
||||
from avalon.blender import lib
|
||||
from openpype.hosts.blender.api import lib
|
||||
|
||||
|
||||
class CreateAction(openpype.hosts.blender.api.plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -3,9 +3,8 @@
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib, ops
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib, ops
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
|
||||
|
||||
|
||||
class CreateAnimation(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -3,9 +3,8 @@
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib, ops
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib, ops
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
|
||||
|
||||
|
||||
class CreateCamera(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -3,9 +3,8 @@
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib, ops
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib, ops
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
|
||||
|
||||
|
||||
class CreateLayout(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -3,9 +3,8 @@
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib, ops
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib, ops
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
|
||||
|
||||
|
||||
class CreateModel(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -3,8 +3,8 @@
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib
|
||||
import openpype.hosts.blender.api.plugin
|
||||
from openpype.hosts.blender.api import lib
|
||||
|
||||
|
||||
class CreatePointcache(openpype.hosts.blender.api.plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -3,9 +3,8 @@
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib, ops
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib, ops
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
|
||||
|
||||
|
||||
class CreateRig(plugin.Creator):
|
||||
|
|
|
|||
|
|
@ -7,11 +7,12 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
from openpype.hosts.blender.api import plugin, lib
|
||||
|
||||
|
||||
class CacheModelLoader(plugin.AssetLoader):
|
||||
|
|
|
|||
|
|
@ -1,16 +1,11 @@
|
|||
"""Load an animation in Blender."""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
|
||||
|
||||
logger = logging.getLogger("openpype").getChild(
|
||||
"blender").getChild("load_animation")
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
|
||||
|
||||
|
||||
class BlendAnimationLoader(plugin.AssetLoader):
|
||||
|
|
|
|||
|
|
@ -7,10 +7,12 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
|
||||
class AudioLoader(plugin.AssetLoader):
|
||||
|
|
|
|||
|
|
@ -8,10 +8,12 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
logger = logging.getLogger("openpype").getChild(
|
||||
"blender").getChild("load_camera")
|
||||
|
|
|
|||
|
|
@ -7,11 +7,12 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
|
||||
class FbxCameraLoader(plugin.AssetLoader):
|
||||
|
|
|
|||
|
|
@ -7,11 +7,12 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import lib
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api import plugin, lib
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
|
||||
class FbxModelLoader(plugin.AssetLoader):
|
||||
|
|
|
|||
|
|
@ -7,10 +7,14 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype import lib
|
||||
from openpype.pipeline import legacy_create
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
|
||||
class BlendLayoutLoader(plugin.AssetLoader):
|
||||
|
|
@ -59,7 +63,9 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
library = bpy.data.libraries.get(bpy.path.basename(libpath))
|
||||
bpy.data.libraries.remove(library)
|
||||
|
||||
def _process(self, libpath, asset_group, group_name, actions):
|
||||
def _process(
|
||||
self, libpath, asset_group, group_name, asset, representation, actions
|
||||
):
|
||||
with bpy.data.libraries.load(
|
||||
libpath, link=True, relative=False
|
||||
) as (data_from, data_to):
|
||||
|
|
@ -72,7 +78,8 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
container = None
|
||||
|
||||
for empty in empties:
|
||||
if empty.get(AVALON_PROPERTY):
|
||||
if (empty.get(AVALON_PROPERTY) and
|
||||
empty.get(AVALON_PROPERTY).get('family') == 'layout'):
|
||||
container = empty
|
||||
break
|
||||
|
||||
|
|
@ -83,12 +90,16 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
objects = []
|
||||
nodes = list(container.children)
|
||||
|
||||
for obj in nodes:
|
||||
obj.parent = asset_group
|
||||
allowed_types = ['ARMATURE', 'MESH', 'EMPTY']
|
||||
|
||||
for obj in nodes:
|
||||
objects.append(obj)
|
||||
nodes.extend(list(obj.children))
|
||||
if obj.type in allowed_types:
|
||||
obj.parent = asset_group
|
||||
|
||||
for obj in nodes:
|
||||
if obj.type in allowed_types:
|
||||
objects.append(obj)
|
||||
nodes.extend(list(obj.children))
|
||||
|
||||
objects.reverse()
|
||||
|
||||
|
|
@ -106,7 +117,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
parent.objects.link(obj)
|
||||
|
||||
for obj in objects:
|
||||
local_obj = plugin.prepare_data(obj, group_name)
|
||||
local_obj = plugin.prepare_data(obj)
|
||||
|
||||
action = None
|
||||
|
||||
|
|
@ -114,7 +125,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
action = actions.get(local_obj.name, None)
|
||||
|
||||
if local_obj.type == 'MESH':
|
||||
plugin.prepare_data(local_obj.data, group_name)
|
||||
plugin.prepare_data(local_obj.data)
|
||||
|
||||
if obj != local_obj:
|
||||
for constraint in constraints:
|
||||
|
|
@ -123,15 +134,18 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
|
||||
for material_slot in local_obj.material_slots:
|
||||
if material_slot.material:
|
||||
plugin.prepare_data(material_slot.material, group_name)
|
||||
plugin.prepare_data(material_slot.material)
|
||||
elif local_obj.type == 'ARMATURE':
|
||||
plugin.prepare_data(local_obj.data, group_name)
|
||||
plugin.prepare_data(local_obj.data)
|
||||
|
||||
if action is not None:
|
||||
if local_obj.animation_data is None:
|
||||
local_obj.animation_data_create()
|
||||
local_obj.animation_data.action = action
|
||||
elif local_obj.animation_data.action is not None:
|
||||
elif (local_obj.animation_data and
|
||||
local_obj.animation_data.action is not None):
|
||||
plugin.prepare_data(
|
||||
local_obj.animation_data.action, group_name)
|
||||
local_obj.animation_data.action)
|
||||
|
||||
# Set link the drivers to the local object
|
||||
if local_obj.data.animation_data:
|
||||
|
|
@ -140,6 +154,21 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
for t in v.targets:
|
||||
t.id = local_obj
|
||||
|
||||
elif local_obj.type == 'EMPTY':
|
||||
creator_plugin = lib.get_creator_by_name("CreateAnimation")
|
||||
if not creator_plugin:
|
||||
raise ValueError("Creator plugin \"CreateAnimation\" was "
|
||||
"not found.")
|
||||
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=local_obj.name.split(':')[-1] + "_animation",
|
||||
asset=asset,
|
||||
options={"useSelection": False,
|
||||
"asset_group": local_obj},
|
||||
data={"dependencies": representation}
|
||||
)
|
||||
|
||||
if not local_obj.get(AVALON_PROPERTY):
|
||||
local_obj[AVALON_PROPERTY] = dict()
|
||||
|
||||
|
|
@ -148,7 +177,63 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
|
||||
objects.reverse()
|
||||
|
||||
bpy.data.orphans_purge(do_local_ids=False)
|
||||
armatures = [
|
||||
obj for obj in bpy.data.objects
|
||||
if obj.type == 'ARMATURE' and obj.library is None]
|
||||
arm_act = {}
|
||||
|
||||
# The armatures with an animation need to be at the center of the
|
||||
# scene to be hooked correctly by the curves modifiers.
|
||||
for armature in armatures:
|
||||
if armature.animation_data and armature.animation_data.action:
|
||||
arm_act[armature] = armature.animation_data.action
|
||||
armature.animation_data.action = None
|
||||
armature.location = (0.0, 0.0, 0.0)
|
||||
for bone in armature.pose.bones:
|
||||
bone.location = (0.0, 0.0, 0.0)
|
||||
bone.rotation_euler = (0.0, 0.0, 0.0)
|
||||
|
||||
curves = [obj for obj in data_to.objects if obj.type == 'CURVE']
|
||||
|
||||
for curve in curves:
|
||||
curve_name = curve.name.split(':')[0]
|
||||
curve_obj = bpy.data.objects.get(curve_name)
|
||||
|
||||
local_obj = plugin.prepare_data(curve)
|
||||
plugin.prepare_data(local_obj.data)
|
||||
|
||||
# Curves need to reset the hook, but to do that they need to be
|
||||
# in the view layer.
|
||||
parent.objects.link(local_obj)
|
||||
plugin.deselect_all()
|
||||
local_obj.select_set(True)
|
||||
bpy.context.view_layer.objects.active = local_obj
|
||||
if local_obj.library is None:
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
bpy.ops.object.hook_reset()
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
parent.objects.unlink(local_obj)
|
||||
|
||||
local_obj.use_fake_user = True
|
||||
|
||||
for mod in local_obj.modifiers:
|
||||
mod.object = bpy.data.objects.get(f"{mod.object.name}")
|
||||
|
||||
if not local_obj.get(AVALON_PROPERTY):
|
||||
local_obj[AVALON_PROPERTY] = dict()
|
||||
|
||||
avalon_info = local_obj[AVALON_PROPERTY]
|
||||
avalon_info.update({"container_name": group_name})
|
||||
|
||||
local_obj.parent = curve_obj
|
||||
objects.append(local_obj)
|
||||
|
||||
for armature in armatures:
|
||||
if arm_act.get(armature):
|
||||
armature.animation_data.action = arm_act[armature]
|
||||
|
||||
while bpy.data.orphans_purge(do_local_ids=False):
|
||||
pass
|
||||
|
||||
plugin.deselect_all()
|
||||
|
||||
|
|
@ -168,6 +253,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
representation = str(context["representation"]["_id"])
|
||||
|
||||
asset_name = plugin.asset_name(asset, subset)
|
||||
unique_number = plugin.get_unique_number(asset, subset)
|
||||
|
|
@ -183,7 +269,8 @@ class BlendLayoutLoader(plugin.AssetLoader):
|
|||
asset_group.empty_display_type = 'SINGLE_ARROW'
|
||||
avalon_container.objects.link(asset_group)
|
||||
|
||||
objects = self._process(libpath, asset_group, group_name, None)
|
||||
objects = self._process(
|
||||
libpath, asset_group, group_name, asset, representation, None)
|
||||
|
||||
for child in asset_group.children:
|
||||
if child.get(AVALON_PROPERTY):
|
||||
|
|
|
|||
|
|
@ -1,18 +1,19 @@
|
|||
"""Load a layout in Blender."""
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, Optional
|
||||
|
||||
import bpy
|
||||
import json
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype import lib
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_INSTANCES,
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
from openpype.hosts.blender.api import plugin
|
||||
|
||||
|
||||
|
|
@ -92,6 +93,10 @@ class JsonLayoutLoader(plugin.AssetLoader):
|
|||
'animation_asset': asset
|
||||
}
|
||||
|
||||
if element.get('animation'):
|
||||
options['animation_file'] = str(Path(libpath).with_suffix(
|
||||
'')) + "." + element.get('animation')
|
||||
|
||||
# This should return the loaded asset, but the load call will be
|
||||
# added to the queue to run in the Blender main thread, so
|
||||
# at this time it will not return anything. The assets will be
|
||||
|
|
@ -104,20 +109,22 @@ class JsonLayoutLoader(plugin.AssetLoader):
|
|||
options=options
|
||||
)
|
||||
|
||||
# Create the camera asset and the camera instance
|
||||
creator_plugin = lib.get_creator_by_name("CreateCamera")
|
||||
if not creator_plugin:
|
||||
raise ValueError("Creator plugin \"CreateCamera\" was "
|
||||
"not found.")
|
||||
# Camera creation when loading a layout is not necessary for now,
|
||||
# but the code is worth keeping in case we need it in the future.
|
||||
# # Create the camera asset and the camera instance
|
||||
# creator_plugin = lib.get_creator_by_name("CreateCamera")
|
||||
# if not creator_plugin:
|
||||
# raise ValueError("Creator plugin \"CreateCamera\" was "
|
||||
# "not found.")
|
||||
|
||||
api.create(
|
||||
creator_plugin,
|
||||
name="camera",
|
||||
# name=f"{unique_number}_{subset}_animation",
|
||||
asset=asset,
|
||||
options={"useSelection": False}
|
||||
# data={"dependencies": str(context["representation"]["_id"])}
|
||||
)
|
||||
# legacy_create(
|
||||
# creator_plugin,
|
||||
# name="camera",
|
||||
# # name=f"{unique_number}_{subset}_animation",
|
||||
# asset=asset,
|
||||
# options={"useSelection": False}
|
||||
# # data={"dependencies": str(context["representation"]["_id"])}
|
||||
# )
|
||||
|
||||
def process_asset(self,
|
||||
context: dict,
|
||||
|
|
|
|||
|
|
@ -8,8 +8,12 @@ import os
|
|||
import json
|
||||
import bpy
|
||||
|
||||
from avalon import api, blender
|
||||
import openpype.hosts.blender.api.plugin as plugin
|
||||
from avalon import api
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
containerise_existing,
|
||||
AVALON_PROPERTY
|
||||
)
|
||||
|
||||
|
||||
class BlendLookLoader(plugin.AssetLoader):
|
||||
|
|
@ -105,7 +109,7 @@ class BlendLookLoader(plugin.AssetLoader):
|
|||
|
||||
container = bpy.data.collections.new(lib_container)
|
||||
container.name = container_name
|
||||
blender.pipeline.containerise_existing(
|
||||
containerise_existing(
|
||||
container,
|
||||
name,
|
||||
namespace,
|
||||
|
|
@ -113,7 +117,7 @@ class BlendLookLoader(plugin.AssetLoader):
|
|||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
metadata = container.get(blender.pipeline.AVALON_PROPERTY)
|
||||
metadata = container.get(AVALON_PROPERTY)
|
||||
|
||||
metadata["libpath"] = libpath
|
||||
metadata["lib_container"] = lib_container
|
||||
|
|
@ -161,7 +165,7 @@ class BlendLookLoader(plugin.AssetLoader):
|
|||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(blender.pipeline.AVALON_PROPERTY)
|
||||
collection_metadata = collection.get(AVALON_PROPERTY)
|
||||
collection_libpath = collection_metadata["libpath"]
|
||||
|
||||
normalized_collection_libpath = (
|
||||
|
|
@ -204,7 +208,7 @@ class BlendLookLoader(plugin.AssetLoader):
|
|||
if not collection:
|
||||
return False
|
||||
|
||||
collection_metadata = collection.get(blender.pipeline.AVALON_PROPERTY)
|
||||
collection_metadata = collection.get(AVALON_PROPERTY)
|
||||
|
||||
for obj in collection_metadata['objects']:
|
||||
for child in self.get_all_children(obj):
|
||||
|
|
|
|||
|
|
@ -7,10 +7,12 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
|
||||
class BlendModelLoader(plugin.AssetLoader):
|
||||
|
|
@ -81,7 +83,8 @@ class BlendModelLoader(plugin.AssetLoader):
|
|||
plugin.prepare_data(local_obj.data, group_name)
|
||||
|
||||
for material_slot in local_obj.material_slots:
|
||||
plugin.prepare_data(material_slot.material, group_name)
|
||||
if material_slot.material:
|
||||
plugin.prepare_data(material_slot.material, group_name)
|
||||
|
||||
if not local_obj.get(AVALON_PROPERTY):
|
||||
local_obj[AVALON_PROPERTY] = dict()
|
||||
|
|
@ -245,7 +248,8 @@ class BlendModelLoader(plugin.AssetLoader):
|
|||
# If it is the last object to use that library, remove it
|
||||
if count == 1:
|
||||
library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
|
||||
bpy.data.libraries.remove(library)
|
||||
if library:
|
||||
bpy.data.libraries.remove(library)
|
||||
|
||||
self._process(str(libpath), asset_group, object_name)
|
||||
|
||||
|
|
@ -253,6 +257,7 @@ class BlendModelLoader(plugin.AssetLoader):
|
|||
|
||||
metadata["libpath"] = str(libpath)
|
||||
metadata["representation"] = str(representation["_id"])
|
||||
metadata["parent"] = str(representation["parent"])
|
||||
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
|
|
|||
|
|
@ -7,11 +7,15 @@ from typing import Dict, List, Optional
|
|||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender.pipeline import AVALON_CONTAINERS
|
||||
from avalon.blender.pipeline import AVALON_CONTAINER_ID
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from avalon.blender import lib as avalon_lib
|
||||
from openpype import lib
|
||||
from openpype.pipeline import legacy_create
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_CONTAINERS,
|
||||
AVALON_PROPERTY,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
|
||||
|
||||
class BlendRigLoader(plugin.AssetLoader):
|
||||
|
|
@ -110,6 +114,8 @@ class BlendRigLoader(plugin.AssetLoader):
|
|||
plugin.prepare_data(local_obj.data, group_name)
|
||||
|
||||
if action is not None:
|
||||
if local_obj.animation_data is None:
|
||||
local_obj.animation_data_create()
|
||||
local_obj.animation_data.action = action
|
||||
elif (local_obj.animation_data and
|
||||
local_obj.animation_data.action is not None):
|
||||
|
|
@ -194,12 +200,14 @@ class BlendRigLoader(plugin.AssetLoader):
|
|||
plugin.deselect_all()
|
||||
|
||||
create_animation = False
|
||||
anim_file = None
|
||||
|
||||
if options is not None:
|
||||
parent = options.get('parent')
|
||||
transform = options.get('transform')
|
||||
action = options.get('action')
|
||||
create_animation = options.get('create_animation')
|
||||
anim_file = options.get('animation_file')
|
||||
|
||||
if parent and transform:
|
||||
location = transform.get('translation')
|
||||
|
|
@ -241,7 +249,7 @@ class BlendRigLoader(plugin.AssetLoader):
|
|||
|
||||
animation_asset = options.get('animation_asset')
|
||||
|
||||
api.create(
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=namespace + "_animation",
|
||||
# name=f"{unique_number}_{subset}_animation",
|
||||
|
|
@ -252,6 +260,26 @@ class BlendRigLoader(plugin.AssetLoader):
|
|||
|
||||
plugin.deselect_all()
|
||||
|
||||
if anim_file:
|
||||
bpy.ops.import_scene.fbx(filepath=anim_file, anim_offset=0.0)
|
||||
|
||||
imported = avalon_lib.get_selection()
|
||||
|
||||
armature = [
|
||||
o for o in asset_group.children if o.type == 'ARMATURE'][0]
|
||||
|
||||
imported_group = [
|
||||
o for o in imported if o.type == 'EMPTY'][0]
|
||||
|
||||
for obj in imported:
|
||||
if obj.type == 'ARMATURE':
|
||||
if not armature.animation_data:
|
||||
armature.animation_data_create()
|
||||
armature.animation_data.action = obj.animation_data.action
|
||||
|
||||
self._remove(imported_group)
|
||||
bpy.data.objects.remove(imported_group)
|
||||
|
||||
bpy.context.scene.collection.objects.link(asset_group)
|
||||
|
||||
asset_group[AVALON_PROPERTY] = {
|
||||
|
|
@ -348,6 +376,7 @@ class BlendRigLoader(plugin.AssetLoader):
|
|||
|
||||
metadata["libpath"] = str(libpath)
|
||||
metadata["representation"] = str(representation["_id"])
|
||||
metadata["parent"] = str(representation["parent"])
|
||||
|
||||
def exec_remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing asset group from a Blender scene.
|
||||
|
|
|
|||
|
|
@ -1,11 +1,13 @@
|
|||
import json
|
||||
from typing import Generator
|
||||
|
||||
import bpy
|
||||
import json
|
||||
|
||||
import pyblish.api
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from avalon.blender.pipeline import AVALON_INSTANCES
|
||||
from openpype.hosts.blender.api.pipeline import (
|
||||
AVALON_INSTANCES,
|
||||
AVALON_PROPERTY,
|
||||
)
|
||||
|
||||
|
||||
class CollectInstances(pyblish.api.ContextPlugin):
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import os
|
||||
|
||||
import bpy
|
||||
|
||||
from openpype import api
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
|
||||
import bpy
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
|
||||
|
||||
|
||||
class ExtractABC(api.Extractor):
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ import os
|
|||
|
||||
import bpy
|
||||
|
||||
# import avalon.blender.workio
|
||||
import openpype.api
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -29,12 +29,13 @@ class ExtractBlendAnimation(openpype.api.Extractor):
|
|||
if isinstance(obj, bpy.types.Object) and obj.type == 'EMPTY':
|
||||
child = obj.children[0]
|
||||
if child and child.type == 'ARMATURE':
|
||||
if not obj.animation_data:
|
||||
obj.animation_data_create()
|
||||
obj.animation_data.action = child.animation_data.action
|
||||
obj.animation_data_clear()
|
||||
data_blocks.add(child.animation_data.action)
|
||||
data_blocks.add(obj)
|
||||
if child.animation_data and child.animation_data.action:
|
||||
if not obj.animation_data:
|
||||
obj.animation_data_create()
|
||||
obj.animation_data.action = child.animation_data.action
|
||||
obj.animation_data_clear()
|
||||
data_blocks.add(child.animation_data.action)
|
||||
data_blocks.add(obj)
|
||||
|
||||
bpy.data.libraries.write(filepath, data_blocks)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import os
|
||||
|
||||
import bpy
|
||||
|
||||
from openpype import api
|
||||
from openpype.hosts.blender.api import plugin
|
||||
|
||||
import bpy
|
||||
|
||||
|
||||
class ExtractCamera(api.Extractor):
|
||||
"""Extract as the camera as FBX."""
|
||||
|
|
@ -50,6 +50,10 @@ class ExtractCamera(api.Extractor):
|
|||
filepath=filepath,
|
||||
use_active_collection=False,
|
||||
use_selection=True,
|
||||
bake_anim_use_nla_strips=False,
|
||||
bake_anim_use_all_actions=False,
|
||||
add_leaf_bones=False,
|
||||
armature_nodetype='ROOT',
|
||||
object_types={'CAMERA'},
|
||||
bake_anim_simplify_factor=0.0
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import os
|
||||
|
||||
import bpy
|
||||
|
||||
from openpype import api
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
|
||||
import bpy
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
|
||||
|
||||
|
||||
class ExtractFBX(api.Extractor):
|
||||
|
|
@ -50,6 +50,9 @@ class ExtractFBX(api.Extractor):
|
|||
new_materials.append(mat)
|
||||
new_materials_objs.append(obj)
|
||||
|
||||
scale_length = bpy.context.scene.unit_settings.scale_length
|
||||
bpy.context.scene.unit_settings.scale_length = 0.01
|
||||
|
||||
# We export the fbx
|
||||
bpy.ops.export_scene.fbx(
|
||||
context,
|
||||
|
|
@ -60,6 +63,8 @@ class ExtractFBX(api.Extractor):
|
|||
add_leaf_bones=False
|
||||
)
|
||||
|
||||
bpy.context.scene.unit_settings.scale_length = scale_length
|
||||
|
||||
plugin.deselect_all()
|
||||
|
||||
for mat in new_materials:
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import bpy_extras.anim_utils
|
|||
|
||||
from openpype import api
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
|
||||
|
||||
|
||||
class ExtractAnimationFBX(api.Extractor):
|
||||
|
|
@ -37,13 +37,6 @@ class ExtractAnimationFBX(api.Extractor):
|
|||
armature = [
|
||||
obj for obj in asset_group.children if obj.type == 'ARMATURE'][0]
|
||||
|
||||
asset_group_name = asset_group.name
|
||||
asset_group.name = asset_group.get(AVALON_PROPERTY).get("asset_name")
|
||||
|
||||
armature_name = armature.name
|
||||
original_name = armature_name.split(':')[1]
|
||||
armature.name = original_name
|
||||
|
||||
object_action_pairs = []
|
||||
original_actions = []
|
||||
|
||||
|
|
@ -66,6 +59,13 @@ class ExtractAnimationFBX(api.Extractor):
|
|||
self.log.info("Object have no animation.")
|
||||
return
|
||||
|
||||
asset_group_name = asset_group.name
|
||||
asset_group.name = asset_group.get(AVALON_PROPERTY).get("asset_name")
|
||||
|
||||
armature_name = armature.name
|
||||
original_name = armature_name.split(':')[1]
|
||||
armature.name = original_name
|
||||
|
||||
object_action_pairs.append((armature, copy_action))
|
||||
original_actions.append(curr_action)
|
||||
|
||||
|
|
@ -123,7 +123,7 @@ class ExtractAnimationFBX(api.Extractor):
|
|||
json_path = os.path.join(stagingdir, json_filename)
|
||||
|
||||
json_dict = {
|
||||
"instance_name": asset_group.get(AVALON_PROPERTY).get("namespace")
|
||||
"instance_name": asset_group.get(AVALON_PROPERTY).get("objectName")
|
||||
}
|
||||
|
||||
# collection = instance.data.get("name")
|
||||
|
|
|
|||
|
|
@ -2,9 +2,12 @@ import os
|
|||
import json
|
||||
|
||||
import bpy
|
||||
import bpy_extras
|
||||
import bpy_extras.anim_utils
|
||||
|
||||
from avalon import io
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
from openpype.hosts.blender.api import plugin
|
||||
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
|
||||
import openpype.api
|
||||
|
||||
|
||||
|
|
@ -16,6 +19,99 @@ class ExtractLayout(openpype.api.Extractor):
|
|||
families = ["layout"]
|
||||
optional = True
|
||||
|
||||
def _export_animation(self, asset, instance, stagingdir, fbx_count):
|
||||
n = fbx_count
|
||||
|
||||
for obj in asset.children:
|
||||
if obj.type != "ARMATURE":
|
||||
continue
|
||||
|
||||
object_action_pairs = []
|
||||
original_actions = []
|
||||
|
||||
starting_frames = []
|
||||
ending_frames = []
|
||||
|
||||
# For each armature, we make a copy of the current action
|
||||
curr_action = None
|
||||
copy_action = None
|
||||
|
||||
if obj.animation_data and obj.animation_data.action:
|
||||
curr_action = obj.animation_data.action
|
||||
copy_action = curr_action.copy()
|
||||
|
||||
curr_frame_range = curr_action.frame_range
|
||||
|
||||
starting_frames.append(curr_frame_range[0])
|
||||
ending_frames.append(curr_frame_range[1])
|
||||
else:
|
||||
self.log.info("Object have no animation.")
|
||||
continue
|
||||
|
||||
asset_group_name = asset.name
|
||||
asset.name = asset.get(AVALON_PROPERTY).get("asset_name")
|
||||
|
||||
armature_name = obj.name
|
||||
original_name = armature_name.split(':')[1]
|
||||
obj.name = original_name
|
||||
|
||||
object_action_pairs.append((obj, copy_action))
|
||||
original_actions.append(curr_action)
|
||||
|
||||
# We compute the starting and ending frames
|
||||
max_frame = min(starting_frames)
|
||||
min_frame = max(ending_frames)
|
||||
|
||||
# We bake the copy of the current action for each object
|
||||
bpy_extras.anim_utils.bake_action_objects(
|
||||
object_action_pairs,
|
||||
frames=range(int(min_frame), int(max_frame)),
|
||||
do_object=False,
|
||||
do_clean=False
|
||||
)
|
||||
|
||||
for o in bpy.data.objects:
|
||||
o.select_set(False)
|
||||
|
||||
asset.select_set(True)
|
||||
obj.select_set(True)
|
||||
fbx_filename = f"{n:03d}.fbx"
|
||||
filepath = os.path.join(stagingdir, fbx_filename)
|
||||
|
||||
override = plugin.create_blender_context(
|
||||
active=asset, selected=[asset, obj])
|
||||
bpy.ops.export_scene.fbx(
|
||||
override,
|
||||
filepath=filepath,
|
||||
use_active_collection=False,
|
||||
use_selection=True,
|
||||
bake_anim_use_nla_strips=False,
|
||||
bake_anim_use_all_actions=False,
|
||||
add_leaf_bones=False,
|
||||
armature_nodetype='ROOT',
|
||||
object_types={'EMPTY', 'ARMATURE'}
|
||||
)
|
||||
obj.name = armature_name
|
||||
asset.name = asset_group_name
|
||||
asset.select_set(False)
|
||||
obj.select_set(False)
|
||||
|
||||
# We delete the baked action and set the original one back
|
||||
for i in range(0, len(object_action_pairs)):
|
||||
pair = object_action_pairs[i]
|
||||
action = original_actions[i]
|
||||
|
||||
if action:
|
||||
pair[0].animation_data.action = action
|
||||
|
||||
if pair[1]:
|
||||
pair[1].user_clear()
|
||||
bpy.data.actions.remove(pair[1])
|
||||
|
||||
return fbx_filename, n + 1
|
||||
|
||||
return None, n
|
||||
|
||||
def process(self, instance):
|
||||
# Define extract output file path
|
||||
stagingdir = self.staging_dir(instance)
|
||||
|
|
@ -23,10 +119,16 @@ class ExtractLayout(openpype.api.Extractor):
|
|||
# Perform extraction
|
||||
self.log.info("Performing extraction..")
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
json_data = []
|
||||
fbx_files = []
|
||||
|
||||
asset_group = bpy.data.objects[str(instance)]
|
||||
|
||||
fbx_count = 0
|
||||
|
||||
for asset in asset_group.children:
|
||||
metadata = asset.get(AVALON_PROPERTY)
|
||||
|
||||
|
|
@ -34,6 +136,7 @@ class ExtractLayout(openpype.api.Extractor):
|
|||
family = metadata["family"]
|
||||
|
||||
self.log.debug("Parent: {}".format(parent))
|
||||
# Get blend reference
|
||||
blend = io.find_one(
|
||||
{
|
||||
"type": "representation",
|
||||
|
|
@ -41,10 +144,39 @@ class ExtractLayout(openpype.api.Extractor):
|
|||
"name": "blend"
|
||||
},
|
||||
projection={"_id": True})
|
||||
blend_id = blend["_id"]
|
||||
blend_id = None
|
||||
if blend:
|
||||
blend_id = blend["_id"]
|
||||
# Get fbx reference
|
||||
fbx = io.find_one(
|
||||
{
|
||||
"type": "representation",
|
||||
"parent": io.ObjectId(parent),
|
||||
"name": "fbx"
|
||||
},
|
||||
projection={"_id": True})
|
||||
fbx_id = None
|
||||
if fbx:
|
||||
fbx_id = fbx["_id"]
|
||||
# Get abc reference
|
||||
abc = io.find_one(
|
||||
{
|
||||
"type": "representation",
|
||||
"parent": io.ObjectId(parent),
|
||||
"name": "abc"
|
||||
},
|
||||
projection={"_id": True})
|
||||
abc_id = None
|
||||
if abc:
|
||||
abc_id = abc["_id"]
|
||||
|
||||
json_element = {}
|
||||
json_element["reference"] = str(blend_id)
|
||||
if blend_id:
|
||||
json_element["reference"] = str(blend_id)
|
||||
if fbx_id:
|
||||
json_element["reference_fbx"] = str(fbx_id)
|
||||
if abc_id:
|
||||
json_element["reference_abc"] = str(abc_id)
|
||||
json_element["family"] = family
|
||||
json_element["instance_name"] = asset.name
|
||||
json_element["asset_name"] = metadata["asset_name"]
|
||||
|
|
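A sketch of the json_element this loop can produce for one asset, assuming a rig with published blend and fbx representations but no abc; the id strings and names are illustrative only, and the transform and animation keys added further below are omitted here:

    json_element = {
        "reference": "61f0a2c4e7d3b2a1c8d4e5f6",      # hypothetical blend ObjectId string
        "reference_fbx": "61f0a2c4e7d3b2a1c8d4e5f7",  # hypothetical fbx ObjectId string
        "family": "rig",
        "instance_name": "characterA_rigMain_01",
        "asset_name": "characterA",
    }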
@ -67,6 +199,16 @@ class ExtractLayout(openpype.api.Extractor):
|
|||
"z": asset.scale.z
|
||||
}
|
||||
}
|
||||
|
||||
# Extract the animation as well
|
||||
if family == "rig":
|
||||
f, n = self._export_animation(
|
||||
asset, instance, stagingdir, fbx_count)
|
||||
if f:
|
||||
fbx_files.append(f)
|
||||
json_element["animation"] = f
|
||||
fbx_count = n
|
||||
|
||||
json_data.append(json_element)
|
||||
|
||||
json_filename = "{}.json".format(instance.name)
|
||||
|
|
@ -75,16 +217,32 @@ class ExtractLayout(openpype.api.Extractor):
|
|||
with open(json_path, "w+") as file:
|
||||
json.dump(json_data, fp=file, indent=2)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
json_representation = {
|
||||
'name': 'json',
|
||||
'ext': 'json',
|
||||
'files': json_filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
instance.data["representations"].append(json_representation)
|
||||
|
||||
self.log.debug(fbx_files)
|
||||
|
||||
if len(fbx_files) == 1:
|
||||
fbx_representation = {
|
||||
'name': 'fbx',
|
||||
'ext': '000.fbx',
|
||||
'files': fbx_files[0],
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(fbx_representation)
|
||||
elif len(fbx_files) > 1:
|
||||
fbx_representation = {
|
||||
'name': 'fbx',
|
||||
'ext': 'fbx',
|
||||
'files': fbx_files,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(fbx_representation)
|
||||
|
||||
self.log.info("Extracted instance '%s' to: %s",
|
||||
instance.name, representation)
|
||||
instance.name, json_representation)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import pyblish.api
|
||||
import avalon.blender.workio
|
||||
from openpype.hosts.blender.api.workio import save_file
|
||||
|
||||
|
||||
class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
|
||||
|
|
@ -9,7 +9,7 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
|
|||
label = "Increment Workfile Version"
|
||||
optional = True
|
||||
hosts = ["blender"]
|
||||
families = ["animation", "model", "rig", "action"]
|
||||
families = ["animation", "model", "rig", "action", "layout"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
|
|
@ -20,6 +20,6 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
|
|||
path = context.data["currentFile"]
|
||||
filepath = version_up(path)
|
||||
|
||||
avalon.blender.workio.save_file(filepath, copy=False)
|
||||
save_file(filepath, copy=False)
|
||||
|
||||
self.log.info('Incrementing script version')
|
||||
|
|
|
|||
|
|
@ -5,15 +5,15 @@ import openpype.hosts.blender.api.action
|
|||
|
||||
|
||||
class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
|
||||
"""Validate that the current object is in Object Mode."""
|
||||
"""Validate that the objects in the instance are in Object Mode."""
|
||||
|
||||
order = pyblish.api.ValidatorOrder - 0.01
|
||||
hosts = ["blender"]
|
||||
families = ["model", "rig"]
|
||||
families = ["model", "rig", "layout"]
|
||||
category = "geometry"
|
||||
label = "Object is in Object Mode"
|
||||
label = "Validate Object Mode"
|
||||
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
|
||||
optional = True
|
||||
optional = False
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance) -> List:
|
||||
|
|
|
|||
|
|
@ -1,3 +0,0 @@
|
|||
from openpype.hosts.blender import api
|
||||
|
||||
api.install()
|
||||
|
|
@ -1,9 +1,9 @@
|
|||
import os
|
||||
import re
|
||||
import json
|
||||
import getpass
|
||||
|
||||
from avalon.vendor import requests
|
||||
import re
|
||||
import requests
|
||||
import pyblish.api
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -3,3 +3,20 @@ import os
|
|||
HOST_DIR = os.path.dirname(
|
||||
os.path.abspath(__file__)
|
||||
)
|
||||
|
||||
|
||||
def add_implementation_envs(env, _app):
|
||||
# Add requirements to DL_PYTHON_HOOK_PATH
|
||||
pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
|
||||
|
||||
env["DL_PYTHON_HOOK_PATH"] = os.path.join(
|
||||
pype_root, "openpype", "hosts", "flame", "startup")
|
||||
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
|
||||
|
||||
# Set default values if they are not already set via settings
|
||||
defaults = {
|
||||
"LOGLEVEL": "DEBUG"
|
||||
}
|
||||
for key, value in defaults.items():
|
||||
if not env.get(key):
|
||||
env[key] = value
|
||||
|
|
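A minimal sketch of calling this hook directly, assuming the function lives in `openpype.hosts.flame` and `OPENPYPE_REPOS_ROOT` is set; the paths and values are illustrative:

    import os
    from openpype.hosts.flame import add_implementation_envs  # assumed module path

    os.environ.setdefault("OPENPYPE_REPOS_ROOT", "/opt/openpype")  # illustrative

    env = {"QT_AUTO_SCREEN_SCALE_FACTOR": "1"}
    add_implementation_envs(env, None)

    # DL_PYTHON_HOOK_PATH now points at .../openpype/hosts/flame/startup,
    # QT_AUTO_SCREEN_SCALE_FACTOR was popped and LOGLEVEL defaults to "DEBUG"
    print(env["DL_PYTHON_HOOK_PATH"], env.get("LOGLEVEL"))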
|
|||
|
|
@ -28,7 +28,8 @@ from .lib import (
|
|||
get_reformated_filename,
|
||||
get_frame_from_filename,
|
||||
get_padding_from_filename,
|
||||
maintained_object_duplication
|
||||
maintained_object_duplication,
|
||||
get_clip_segment
|
||||
)
|
||||
from .utils import (
|
||||
setup,
|
||||
|
|
@ -52,7 +53,10 @@ from .menu import (
|
|||
)
|
||||
from .plugin import (
|
||||
Creator,
|
||||
PublishableClip
|
||||
PublishableClip,
|
||||
ClipLoader,
|
||||
OpenClipSolver
|
||||
|
||||
)
|
||||
from .workio import (
|
||||
open_file,
|
||||
|
|
@ -96,6 +100,7 @@ __all__ = [
|
|||
"get_frame_from_filename",
|
||||
"get_padding_from_filename",
|
||||
"maintained_object_duplication",
|
||||
"get_clip_segment",
|
||||
|
||||
# pipeline
|
||||
"install",
|
||||
|
|
@ -122,6 +127,8 @@ __all__ = [
|
|||
# plugin
|
||||
"Creator",
|
||||
"PublishableClip",
|
||||
"ClipLoader",
|
||||
"OpenClipSolver",
|
||||
|
||||
# workio
|
||||
"open_file",
|
||||
|
|
|
|||
|
|
@ -527,6 +527,7 @@ def get_segment_attributes(segment):
|
|||
|
||||
# Add timeline segment to tree
|
||||
clip_data = {
|
||||
"shot_name": segment.shot_name.get_value(),
|
||||
"segment_name": segment.name.get_value(),
|
||||
"segment_comment": segment.comment.get_value(),
|
||||
"tape_name": segment.tape_name,
|
||||
|
|
@ -692,3 +693,18 @@ def maintained_object_duplication(item):
|
|||
finally:
|
||||
# delete the item at the end
|
||||
flame.delete(duplicate)
|
||||
|
||||
|
||||
def get_clip_segment(flame_clip):
|
||||
name = flame_clip.name.get_value()
|
||||
version = flame_clip.versions[0]
|
||||
track = version.tracks[0]
|
||||
segments = track.segments
|
||||
|
||||
if len(segments) < 1:
|
||||
raise ValueError("Clip `{}` has no segments!".format(name))
|
||||
|
||||
if len(segments) > 1:
|
||||
raise ValueError("Clip `{}` has too many segments!".format(name))
|
||||
|
||||
return segments[0]
|
||||
|
|
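A hedged usage sketch of the new `get_clip_segment` helper, run inside a Flame Python session; `loaded_clip` is a hypothetical placeholder and the import path follows the `__all__` exports above:

    import openpype.hosts.flame.api as opfapi

    # `loaded_clip` is a hypothetical clip with one version, one track, one segment
    try:
        segment = opfapi.get_clip_segment(loaded_clip)
    except ValueError as exc:
        # raised when the clip has zero segments or more than one
        print(exc)
    else:
        print(segment.name.get_value())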
|
|||
|
|
@ -110,19 +110,19 @@ class FlameMenuProjectConnect(_FlameMenuApp):
|
|||
menu = deepcopy(self.menu)
|
||||
|
||||
menu['actions'].append({
|
||||
"name": "Workfiles ...",
|
||||
"name": "Workfiles...",
|
||||
"execute": lambda x: self.tools_helper.show_workfiles()
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Load ...",
|
||||
"name": "Load...",
|
||||
"execute": lambda x: self.tools_helper.show_loader()
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Manage ...",
|
||||
"name": "Manage...",
|
||||
"execute": lambda x: self.tools_helper.show_scene_inventory()
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Library ...",
|
||||
"name": "Library...",
|
||||
"execute": lambda x: self.tools_helper.show_library_loader()
|
||||
})
|
||||
return menu
|
||||
|
|
@ -164,24 +164,27 @@ class FlameMenuTimeline(_FlameMenuApp):
|
|||
menu = deepcopy(self.menu)
|
||||
|
||||
menu['actions'].append({
|
||||
"name": "Create ...",
|
||||
"name": "Create...",
|
||||
"execute": lambda x: callback_selection(
|
||||
x, self.tools_helper.show_creator)
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Publish ...",
|
||||
"name": "Publish...",
|
||||
"execute": lambda x: callback_selection(
|
||||
x, self.tools_helper.show_publish)
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Load ...",
|
||||
"name": "Load...",
|
||||
"execute": lambda x: self.tools_helper.show_loader()
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Manage ...",
|
||||
"name": "Manage...",
|
||||
"execute": lambda x: self.tools_helper.show_scene_inventory()
|
||||
})
|
||||
|
||||
menu['actions'].append({
|
||||
"name": "Library...",
|
||||
"execute": lambda x: self.tools_helper.show_library_loader()
|
||||
})
|
||||
return menu
|
||||
|
||||
def refresh(self, *args, **kwargs):
|
||||
|
|
|
|||
|
|
@ -4,8 +4,10 @@ Basic avalon integration
|
|||
import os
|
||||
import contextlib
|
||||
from avalon import api as avalon
|
||||
from avalon.pipeline import AVALON_CONTAINER_ID
|
||||
from pyblish import api as pyblish
|
||||
from openpype.api import Logger
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from .lib import (
|
||||
set_segment_data_marker,
|
||||
set_publish_attribute,
|
||||
|
|
@ -28,23 +30,11 @@ log = Logger.get_logger(__name__)
|
|||
|
||||
|
||||
def install():
|
||||
# Disable all families except for the ones we explicitly want to see
|
||||
family_states = [
|
||||
"imagesequence",
|
||||
"render2d",
|
||||
"plate",
|
||||
"render",
|
||||
"mov",
|
||||
"clip"
|
||||
]
|
||||
avalon.data["familiesStateDefault"] = False
|
||||
avalon.data["familiesStateToggled"] = family_states
|
||||
|
||||
|
||||
pyblish.register_host("flame")
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
log.info("OpenPype Flame plug-ins registred ...")
|
||||
|
||||
|
|
@ -59,7 +49,7 @@ def uninstall():
|
|||
log.info("Deregistering Flame plug-ins..")
|
||||
pyblish.deregister_plugin_path(PUBLISH_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
|
||||
avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
|
||||
# register callback for switching publishable
|
||||
|
|
@ -68,14 +58,31 @@ def uninstall():
|
|||
log.info("OpenPype Flame host uninstalled ...")
|
||||
|
||||
|
||||
def containerise(tl_segment,
|
||||
def containerise(flame_clip_segment,
|
||||
name,
|
||||
namespace,
|
||||
context,
|
||||
loader=None,
|
||||
data=None):
|
||||
# TODO: containerise
|
||||
pass
|
||||
|
||||
data_imprint = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": str(name),
|
||||
"namespace": str(namespace),
|
||||
"loader": str(loader),
|
||||
"representation": str(context["representation"]["_id"]),
|
||||
}
|
||||
|
||||
if data:
|
||||
for k, v in data.items():
|
||||
data_imprint[k] = v
|
||||
|
||||
log.debug("_ data_imprint: {}".format(data_imprint))
|
||||
|
||||
set_segment_data_marker(flame_clip_segment, data_imprint)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def ls():
|
||||
|
|
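A sketch of how a loader might call the reworked `containerise`, assuming the module path below; `segment` and `context` are hypothetical placeholders for a timeline segment and a loader context holding a representation document:

    from openpype.hosts.flame.api.pipeline import containerise  # assumed module path

    containerised = containerise(
        segment,                       # hypothetical PySegment
        name="plateMain",
        namespace="sh010",
        context=context,               # hypothetical loader context
        loader="LoadClip",
        data={"objectName": "sh010_plateMain_exr"},
    )
    # the imprint is written onto the segment as a data marker; True is returned
    assert containerised is True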
|
|||
|
|
@ -1,7 +1,15 @@
|
|||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from xml.etree import ElementTree as ET
|
||||
import six
|
||||
import qargparse
|
||||
from Qt import QtWidgets, QtCore
|
||||
import openpype.api as openpype
|
||||
from openpype.pipeline import LegacyCreator
|
||||
from openpype import style
|
||||
import avalon.api as avalon
|
||||
from . import (
|
||||
lib as flib,
|
||||
pipeline as fpipeline,
|
||||
|
|
@ -292,7 +300,7 @@ class Spacer(QtWidgets.QWidget):
|
|||
self.setLayout(layout)
|
||||
|
||||
|
||||
class Creator(openpype.Creator):
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator class wrapper
|
||||
"""
|
||||
clip_color = constants.COLOR_MAP["purple"]
|
||||
|
|
@ -354,6 +362,7 @@ class PublishableClip:
|
|||
vertical_sync_default = False
|
||||
driving_layer_default = ""
|
||||
index_from_segment_default = False
|
||||
use_shot_name_default = False
|
||||
|
||||
def __init__(self, segment, **kwargs):
|
||||
self.rename_index = kwargs["rename_index"]
|
||||
|
|
@ -369,6 +378,7 @@ class PublishableClip:
|
|||
# segment (clip) main attributes
|
||||
self.cs_name = self.clip_data["segment_name"]
|
||||
self.cs_index = int(self.clip_data["segment"])
|
||||
self.shot_name = self.clip_data["shot_name"]
|
||||
|
||||
# get track name and index
|
||||
self.track_index = int(self.clip_data["track"])
|
||||
|
|
@ -412,18 +422,21 @@ class PublishableClip:
|
|||
# deal with clip name
|
||||
new_name = self.marker_data.pop("newClipName")
|
||||
|
||||
if self.rename:
|
||||
if self.rename and not self.use_shot_name:
|
||||
# rename segment
|
||||
self.current_segment.name = str(new_name)
|
||||
self.marker_data["asset"] = str(new_name)
|
||||
elif self.use_shot_name:
|
||||
self.marker_data["asset"] = self.shot_name
|
||||
self.marker_data["hierarchyData"]["shot"] = self.shot_name
|
||||
else:
|
||||
self.marker_data["asset"] = self.cs_name
|
||||
self.marker_data["hierarchyData"]["shot"] = self.cs_name
|
||||
|
||||
if self.marker_data["heroTrack"] and self.review_layer:
|
||||
self.marker_data.update({"reviewTrack": self.review_layer})
|
||||
self.marker_data["reviewTrack"] = self.review_layer
|
||||
else:
|
||||
self.marker_data.update({"reviewTrack": None})
|
||||
self.marker_data["reviewTrack"] = None
|
||||
|
||||
# create pype tag on track_item and add data
|
||||
fpipeline.imprint(self.current_segment, self.marker_data)
|
||||
|
|
@ -456,6 +469,8 @@ class PublishableClip:
|
|||
# ui_inputs data or default values if gui was not used
|
||||
self.rename = self.ui_inputs.get(
|
||||
"clipRename", {}).get("value") or self.rename_default
|
||||
self.use_shot_name = self.ui_inputs.get(
|
||||
"useShotName", {}).get("value") or self.use_shot_name_default
|
||||
self.clip_name = self.ui_inputs.get(
|
||||
"clipName", {}).get("value") or self.clip_name_default
|
||||
self.hierarchy = self.ui_inputs.get(
|
||||
|
|
@ -644,3 +659,274 @@ class PublishableClip:
|
|||
|
||||
# Publishing plugin functions
|
||||
# Loader plugin functions
|
||||
|
||||
class ClipLoader(avalon.Loader):
|
||||
"""A basic clip loader for Flame
|
||||
|
||||
This implements the basic behavior for a loader to inherit from: it
containerizes the reference and implements the `remove` and
`update` logic.
|
||||
|
||||
"""
|
||||
|
||||
options = [
|
||||
qargparse.Boolean(
|
||||
"handles",
|
||||
label="Set handles",
|
||||
default=0,
|
||||
help="Also set handles to clip as In/Out marks"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class OpenClipSolver:
|
||||
media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info"
|
||||
tmp_name = "_tmp.clip"
|
||||
tmp_file = None
|
||||
create_new_clip = False
|
||||
|
||||
out_feed_nb_ticks = None
|
||||
out_feed_fps = None
|
||||
out_feed_drop_mode = None
|
||||
|
||||
log = log
|
||||
|
||||
def __init__(self, openclip_file_path, feed_data):
|
||||
# test if media script path exists
|
||||
self._validate_media_script_path()
|
||||
|
||||
# new feed variables:
|
||||
feed_path = feed_data["path"]
|
||||
self.feed_version_name = feed_data["version"]
|
||||
self.feed_colorspace = feed_data.get("colorspace")
|
||||
|
||||
if feed_data.get("logger"):
|
||||
self.log = feed_data["logger"]
|
||||
|
||||
# derive other feed variables
|
||||
self.feed_basename = os.path.basename(feed_path)
|
||||
self.feed_dir = os.path.dirname(feed_path)
|
||||
self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower()
|
||||
|
||||
if not os.path.isfile(openclip_file_path):
|
||||
# openclip does not exist yet and will be created
|
||||
self.tmp_file = self.out_file = openclip_file_path
|
||||
self.create_new_clip = True
|
||||
|
||||
else:
|
||||
# output a temp file
|
||||
self.out_file = openclip_file_path
|
||||
self.tmp_file = os.path.join(self.feed_dir, self.tmp_name)
|
||||
self._clear_tmp_file()
|
||||
|
||||
self.log.info("Temp File: {}".format(self.tmp_file))
|
||||
|
||||
def make(self):
|
||||
self._generate_media_info_file()
|
||||
|
||||
if self.create_new_clip:
|
||||
# New openClip
|
||||
self._create_new_open_clip()
|
||||
else:
|
||||
self._update_open_clip()
|
||||
|
||||
def _validate_media_script_path(self):
|
||||
if not os.path.isfile(self.media_script_path):
|
||||
raise IOError("Media Scirpt does not exist: `{}`".format(
|
||||
self.media_script_path))
|
||||
|
||||
def _generate_media_info_file(self):
|
||||
# Create cmd arguments for getting the xml media info file
|
||||
cmd_args = [
|
||||
self.media_script_path,
|
||||
"-e", self.feed_ext,
|
||||
"-o", self.tmp_file,
|
||||
self.feed_dir
|
||||
]
|
||||
|
||||
# execute creation of clip xml template data
|
||||
try:
|
||||
openpype.run_subprocess(cmd_args)
|
||||
except TypeError:
|
||||
self.log.error("Error creating self.tmp_file")
|
||||
six.reraise(*sys.exc_info())
|
||||
|
||||
def _clear_tmp_file(self):
|
||||
if os.path.isfile(self.tmp_file):
|
||||
os.remove(self.tmp_file)
|
||||
|
||||
def _clear_handler(self, xml_object):
|
||||
for handler in xml_object.findall("./handler"):
|
||||
self.log.debug("Handler found")
|
||||
xml_object.remove(handler)
|
||||
|
||||
def _create_new_open_clip(self):
|
||||
self.log.info("Building new openClip")
|
||||
|
||||
tmp_xml = ET.parse(self.tmp_file)
|
||||
|
||||
tmp_xml_feeds = tmp_xml.find('tracks/track/feeds')
|
||||
tmp_xml_feeds.set('currentVersion', self.feed_version_name)
|
||||
for tmp_feed in tmp_xml_feeds:
|
||||
tmp_feed.set('vuid', self.feed_version_name)
|
||||
|
||||
# add colorspace if any is set
|
||||
if self.feed_colorspace:
|
||||
self._add_colorspace(tmp_feed, self.feed_colorspace)
|
||||
|
||||
self._clear_handler(tmp_feed)
|
||||
|
||||
tmp_xml_versions_obj = tmp_xml.find('versions')
|
||||
tmp_xml_versions_obj.set('currentVersion', self.feed_version_name)
|
||||
for xml_new_version in tmp_xml_versions_obj:
|
||||
xml_new_version.set('uid', self.feed_version_name)
|
||||
xml_new_version.set('type', 'version')
|
||||
|
||||
xml_data = self._fix_xml_data(tmp_xml)
|
||||
self.log.info("Adding feed version: {}".format(self.feed_basename))
|
||||
|
||||
self._write_result_xml_to_file(xml_data)
|
||||
|
||||
self.log.info("openClip Updated: {}".format(self.tmp_file))
|
||||
|
||||
def _update_open_clip(self):
|
||||
self.log.info("Updating openClip ..")
|
||||
|
||||
out_xml = ET.parse(self.out_file)
|
||||
tmp_xml = ET.parse(self.tmp_file)
|
||||
|
||||
self.log.debug(">> out_xml: {}".format(out_xml))
|
||||
self.log.debug(">> tmp_xml: {}".format(tmp_xml))
|
||||
|
||||
# Get new feed from tmp file
|
||||
tmp_xml_feed = tmp_xml.find('tracks/track/feeds/feed')
|
||||
|
||||
self._clear_handler(tmp_xml_feed)
|
||||
self._get_time_info_from_origin(out_xml)
|
||||
|
||||
if self.out_feed_fps:
|
||||
tmp_feed_fps_obj = tmp_xml_feed.find(
|
||||
"startTimecode/rate")
|
||||
tmp_feed_fps_obj.text = self.out_feed_fps
|
||||
if self.out_feed_nb_ticks:
|
||||
tmp_feed_nb_ticks_obj = tmp_xml_feed.find(
|
||||
"startTimecode/nbTicks")
|
||||
tmp_feed_nb_ticks_obj.text = self.out_feed_nb_ticks
|
||||
if self.out_feed_drop_mode:
|
||||
tmp_feed_drop_mode_obj = tmp_xml_feed.find(
|
||||
"startTimecode/dropMode")
|
||||
tmp_feed_drop_mode_obj.text = self.out_feed_drop_mode
|
||||
|
||||
new_path_obj = tmp_xml_feed.find(
|
||||
"spans/span/path")
|
||||
new_path = new_path_obj.text
|
||||
|
||||
feed_added = False
|
||||
if not self._feed_exists(out_xml, new_path):
|
||||
tmp_xml_feed.set('vuid', self.feed_version_name)
|
||||
# Append new temp file feed to .clip source out xml
|
||||
out_track = out_xml.find("tracks/track")
|
||||
# add colorspace if any is set
|
||||
if self.feed_colorspace:
|
||||
self._add_colorspace(tmp_xml_feed, self.feed_colorspace)
|
||||
|
||||
out_feeds = out_track.find('feeds')
|
||||
out_feeds.set('currentVersion', self.feed_version_name)
|
||||
out_feeds.append(tmp_xml_feed)
|
||||
|
||||
self.log.info(
|
||||
"Appending new feed: {}".format(
|
||||
self.feed_version_name))
|
||||
feed_added = True
|
||||
|
||||
if feed_added:
|
||||
# Append vUID to versions
|
||||
out_xml_versions_obj = out_xml.find('versions')
|
||||
out_xml_versions_obj.set(
|
||||
'currentVersion', self.feed_version_name)
|
||||
new_version_obj = ET.Element(
|
||||
"version", {"type": "version", "uid": self.feed_version_name})
|
||||
out_xml_versions_obj.insert(0, new_version_obj)
|
||||
|
||||
xml_data = self._fix_xml_data(out_xml)
|
||||
|
||||
# first create backup
|
||||
self._create_openclip_backup_file(self.out_file)
|
||||
|
||||
self.log.info("Adding feed version: {}".format(
|
||||
self.feed_version_name))
|
||||
|
||||
self._write_result_xml_to_file(xml_data)
|
||||
|
||||
self.log.info("openClip Updated: {}".format(self.out_file))
|
||||
|
||||
self._clear_tmp_file()
|
||||
|
||||
def _get_time_info_from_origin(self, xml_data):
|
||||
try:
|
||||
for out_track in xml_data.iter('track'):
|
||||
for out_feed in out_track.iter('feed'):
|
||||
out_feed_nb_ticks_obj = out_feed.find(
|
||||
'startTimecode/nbTicks')
|
||||
self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text
|
||||
out_feed_fps_obj = out_feed.find(
|
||||
'startTimecode/rate')
|
||||
self.out_feed_fps = out_feed_fps_obj.text
|
||||
out_feed_drop_mode_obj = out_feed.find(
|
||||
'startTimecode/dropMode')
|
||||
self.out_feed_drop_mode = out_feed_drop_mode_obj.text
|
||||
break
|
||||
else:
|
||||
continue
|
||||
except Exception as msg:
|
||||
self.log.warning(msg)
|
||||
|
||||
def _feed_exists(self, xml_data, path):
|
||||
# loop all available feed paths and check if
|
||||
# the path is not already in file
|
||||
for src_path in xml_data.iter('path'):
|
||||
if path == src_path.text:
|
||||
self.log.warning(
|
||||
"Not appending file as it already is in .clip file")
|
||||
return True
|
||||
|
||||
def _fix_xml_data(self, xml_data):
|
||||
xml_root = xml_data.getroot()
|
||||
self._clear_handler(xml_root)
|
||||
return ET.tostring(xml_root).decode('utf-8')
|
||||
|
||||
def _write_result_xml_to_file(self, xml_data):
|
||||
with open(self.out_file, "w") as f:
|
||||
f.write(xml_data)
|
||||
|
||||
def _create_openclip_backup_file(self, file):
|
||||
bck_file = "{}.bak".format(file)
|
||||
# if backup does not exist
|
||||
if not os.path.isfile(bck_file):
|
||||
shutil.copy2(file, bck_file)
|
||||
else:
|
||||
# in case a backup already exists (possibly several)
|
||||
created = False
|
||||
for _i in range(1, 99):
|
||||
bck_file = "{name}.bak.{idx:0>2}".format(
|
||||
name=file,
|
||||
idx=_i)
|
||||
# create numbered backup file
|
||||
if not os.path.isfile(bck_file):
|
||||
shutil.copy2(file, bck_file)
|
||||
created = True
|
||||
break
|
||||
# in case all numbered backups already exist
|
||||
if not created:
|
||||
bck_file = "{}.bak.last".format(file)
|
||||
shutil.copy2(file, bck_file)
|
||||
|
||||
def _add_colorspace(self, feed_obj, profile_name):
|
||||
feed_storage_obj = feed_obj.find("storageFormat")
|
||||
feed_clr_obj = feed_storage_obj.find("colourSpace")
|
||||
if feed_clr_obj is not None:
|
||||
feed_clr_obj = ET.Element(
|
||||
"colourSpace", {"type": "string"})
|
||||
feed_storage_obj.append(feed_clr_obj)
|
||||
|
||||
feed_clr_obj.text = profile_name
|
||||
|
|
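A minimal sketch of driving `OpenClipSolver` from a loader, assuming the class is exposed on `openpype.hosts.flame.api` as the `__all__` changes above suggest; all paths, the version label and the colorspace are illustrative:

    import logging
    import openpype.hosts.flame.api as opfapi

    feed_data = {
        "path": "/projects/demo/shots/sh010/render/v003/sh010_comp_v003.0001.exr",
        "version": "v003",
        "colorspace": "ACES - ACEScg",             # optional, may be None
        "logger": logging.getLogger("openclip"),   # optional, falls back to module log
    }

    solver = opfapi.OpenClipSolver(
        "/projects/demo/shots/sh010/flame/sh010_comp.clip", feed_data)
    # creates the .clip file when it is missing, otherwise appends the feed as a new version
    solver.make()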
|
|||
|
|
@ -4,9 +4,13 @@ import tempfile
|
|||
import contextlib
|
||||
import socket
|
||||
from openpype.lib import (
|
||||
PreLaunchHook, get_openpype_username)
|
||||
PreLaunchHook,
|
||||
get_openpype_username
|
||||
)
|
||||
from openpype.lib.applications import (
|
||||
ApplicationLaunchFailed
|
||||
)
|
||||
from openpype.hosts import flame as opflame
|
||||
import openpype.hosts.flame.api as opfapi
|
||||
import openpype
|
||||
from pprint import pformat
|
||||
|
||||
|
|
@ -33,7 +37,25 @@ class FlamePrelaunch(PreLaunchHook):
|
|||
|
||||
"""Hook entry method."""
|
||||
project_doc = self.data["project_doc"]
|
||||
project_name = project_doc["name"]
|
||||
|
||||
# get image io
|
||||
project_anatomy = self.data["anatomy"]
|
||||
|
||||
# make sure anatomy settings have the flame key
|
||||
if not project_anatomy["imageio"].get("flame"):
|
||||
raise ApplicationLaunchFailed((
|
||||
"Anatomy project settings are missing `flame` key. "
|
||||
"Please make sure you remove project overides on "
|
||||
"Anatomy Image io")
|
||||
)
|
||||
|
||||
imageio_flame = project_anatomy["imageio"]["flame"]
|
||||
|
||||
# get user name and host name
|
||||
user_name = get_openpype_username()
|
||||
user_name = user_name.replace(".", "_")
|
||||
|
||||
hostname = socket.gethostname() # not returning wiretap host name
|
||||
|
||||
self.log.debug("Collected user \"{}\"".format(user_name))
|
||||
|
|
@ -41,7 +63,7 @@ class FlamePrelaunch(PreLaunchHook):
|
|||
_db_p_data = project_doc["data"]
|
||||
width = _db_p_data["resolutionWidth"]
|
||||
height = _db_p_data["resolutionHeight"]
|
||||
fps = int(_db_p_data["fps"])
|
||||
fps = float(_db_p_data["fps"])
|
||||
|
||||
project_data = {
|
||||
"Name": project_doc["name"],
|
||||
|
|
@ -52,8 +74,8 @@ class FlamePrelaunch(PreLaunchHook):
|
|||
"FrameHeight": int(height),
|
||||
"AspectRatio": float((width / height) * _db_p_data["pixelAspect"]),
|
||||
"FrameRate": "{} fps".format(fps),
|
||||
"FrameDepth": "16-bit fp",
|
||||
"FieldDominance": "PROGRESSIVE"
|
||||
"FrameDepth": str(imageio_flame["project"]["frameDepth"]),
|
||||
"FieldDominance": str(imageio_flame["project"]["fieldDominance"])
|
||||
}
|
||||
|
||||
data_to_script = {
|
||||
|
|
@ -61,10 +83,10 @@ class FlamePrelaunch(PreLaunchHook):
|
|||
"host_name": _env.get("FLAME_WIRETAP_HOSTNAME") or hostname,
|
||||
"volume_name": _env.get("FLAME_WIRETAP_VOLUME"),
|
||||
"group_name": _env.get("FLAME_WIRETAP_GROUP"),
|
||||
"color_policy": "ACES 1.1",
|
||||
"color_policy": str(imageio_flame["project"]["colourPolicy"]),
|
||||
|
||||
# from project
|
||||
"project_name": project_doc["name"],
|
||||
"project_name": project_name,
|
||||
"user_name": user_name,
|
||||
"project_data": project_data
|
||||
}
|
||||
|
|
@ -77,8 +99,6 @@ class FlamePrelaunch(PreLaunchHook):
|
|||
|
||||
app_arguments = self._get_launch_arguments(data_to_script)
|
||||
|
||||
opfapi.setup(self.launch_context.env)
|
||||
|
||||
self.launch_context.launch_args.extend(app_arguments)
|
||||
|
||||
def _add_pythonpath(self):
|
||||
|
|
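The FlamePrelaunch changes above read only three keys from the anatomy `imageio` settings; a sketch of the minimal structure the hook expects, shown with the previously hard-coded values as example entries:

    # minimal anatomy["imageio"]["flame"] structure consumed by FlamePrelaunch
    imageio_flame = {
        "project": {
            "frameDepth": "16-bit fp",
            "fieldDominance": "PROGRESSIVE",
            "colourPolicy": "ACES 1.1",
        }
    }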
|
|||
|
|
@ -87,41 +87,48 @@ class CreateShotClip(opfapi.Creator):
|
|||
"target": "tag",
|
||||
"toolTip": "Parents folder for shot root folder, Template filled with `Hierarchy Data` section", # noqa
|
||||
"order": 0},
|
||||
"useShotName": {
|
||||
"value": True,
|
||||
"type": "QCheckBox",
|
||||
"label": "Use Shot Name",
|
||||
"target": "ui",
|
||||
"toolTip": "Use name form Shot name clip attribute", # noqa
|
||||
"order": 1},
|
||||
"clipRename": {
|
||||
"value": False,
|
||||
"type": "QCheckBox",
|
||||
"label": "Rename clips",
|
||||
"target": "ui",
|
||||
"toolTip": "Renaming selected clips on fly", # noqa
|
||||
"order": 1},
|
||||
"order": 2},
|
||||
"clipName": {
|
||||
"value": "{sequence}{shot}",
|
||||
"type": "QLineEdit",
|
||||
"label": "Clip Name Template",
|
||||
"target": "ui",
|
||||
"toolTip": "template for creating shot namespaused for renaming (use rename: on)", # noqa
|
||||
"order": 2},
|
||||
"order": 3},
|
||||
"segmentIndex": {
|
||||
"value": True,
|
||||
"type": "QCheckBox",
|
||||
"label": "Segment index",
|
||||
"target": "ui",
|
||||
"toolTip": "Take number from segment index", # noqa
|
||||
"order": 3},
|
||||
"order": 4},
|
||||
"countFrom": {
|
||||
"value": 10,
|
||||
"type": "QSpinBox",
|
||||
"label": "Count sequence from",
|
||||
"target": "ui",
|
||||
"toolTip": "Set when the sequence number stafrom", # noqa
|
||||
"order": 4},
|
||||
"order": 5},
|
||||
"countSteps": {
|
||||
"value": 10,
|
||||
"type": "QSpinBox",
|
||||
"label": "Stepping number",
|
||||
"target": "ui",
|
||||
"toolTip": "What number is adding every new step", # noqa
|
||||
"order": 5},
|
||||
"order": 6},
|
||||
}
|
||||
},
|
||||
"hierarchyData": {
|
||||
|
|
|
|||
247
openpype/hosts/flame/plugins/load/load_clip.py
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
import os
|
||||
import flame
|
||||
from pprint import pformat
|
||||
import openpype.hosts.flame.api as opfapi
|
||||
|
||||
|
||||
class LoadClip(opfapi.ClipLoader):
|
||||
"""Load a subset to timeline as clip
|
||||
|
||||
Place the clip on the timeline at its asset origin timings, collected
during conforming to the project.
|
||||
"""
|
||||
|
||||
families = ["render2d", "source", "plate", "render", "review"]
|
||||
representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264"]
|
||||
|
||||
label = "Load as clip"
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
# settings
|
||||
reel_group_name = "OpenPype_Reels"
|
||||
reel_name = "Loaded"
|
||||
clip_name_template = "{asset}_{subset}_{representation}"
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
|
||||
# get flame objects
|
||||
fproject = flame.project.current_project
|
||||
self.fpd = fproject.current_workspace.desktop
|
||||
|
||||
# load clip to timeline and get main variables
|
||||
namespace = namespace
|
||||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
version_name = version.get("name", None)
|
||||
colorspace = version_data.get("colorspace", None)
|
||||
clip_name = self.clip_name_template.format(
|
||||
**context["representation"]["context"])
|
||||
|
||||
# todo: settings in imageio
|
||||
# convert colorspace with ocio to flame mapping
|
||||
# in imageio flame section
|
||||
colorspace = colorspace
|
||||
|
||||
# create workfile path
|
||||
workfile_dir = os.environ["AVALON_WORKDIR"]
|
||||
openclip_dir = os.path.join(
|
||||
workfile_dir, clip_name
|
||||
)
|
||||
openclip_path = os.path.join(
|
||||
openclip_dir, clip_name + ".clip"
|
||||
)
|
||||
if not os.path.exists(openclip_dir):
|
||||
os.makedirs(openclip_dir)
|
||||
|
||||
# prepare clip data from context and send it to openClipLoader
|
||||
loading_context = {
|
||||
"path": self.fname.replace("\\", "/"),
|
||||
"colorspace": colorspace,
|
||||
"version": "v{:0>3}".format(version_name),
|
||||
"logger": self.log
|
||||
|
||||
}
|
||||
self.log.debug(pformat(
|
||||
loading_context
|
||||
))
|
||||
self.log.debug(openclip_path)
|
||||
|
||||
# make openpype clip file
|
||||
opfapi.OpenClipSolver(openclip_path, loading_context).make()
|
||||
|
||||
# prepare Reel group in actual desktop
|
||||
opc = self._get_clip(
|
||||
clip_name,
|
||||
openclip_path
|
||||
)
|
||||
|
||||
# add additional metadata from the version to imprint Avalon knob
|
||||
add_keys = [
|
||||
"frameStart", "frameEnd", "source", "author",
|
||||
"fps", "handleStart", "handleEnd"
|
||||
]
|
||||
|
||||
# move all version data keys to tag data
|
||||
data_imprint = {}
|
||||
for key in add_keys:
|
||||
data_imprint.update({
|
||||
key: version_data.get(key, str(None))
|
||||
})
|
||||
|
||||
# add variables related to version context
|
||||
data_imprint.update({
|
||||
"version": version_name,
|
||||
"colorspace": colorspace,
|
||||
"objectName": clip_name
|
||||
})
|
||||
|
||||
# TODO: finish the containerisation
|
||||
# opc_segment = opfapi.get_clip_segment(opc)
|
||||
|
||||
# return opfapi.containerise(
|
||||
# opc_segment,
|
||||
# name, namespace, context,
|
||||
# self.__class__.__name__,
|
||||
# data_imprint)
|
||||
|
||||
return opc
|
||||
|
||||
def _get_clip(self, name, clip_path):
|
||||
reel = self._get_reel()
|
||||
# with maintained openclip as opc
|
||||
matching_clip = [cl for cl in reel.clips
|
||||
if cl.name.get_value() == name]
|
||||
if matching_clip:
|
||||
return matching_clip.pop()
|
||||
else:
|
||||
created_clips = flame.import_clips(str(clip_path), reel)
|
||||
return created_clips.pop()
|
||||
|
||||
def _get_reel(self):
|
||||
|
||||
matching_rgroup = [
|
||||
rg for rg in self.fpd.reel_groups
|
||||
if rg.name.get_value() == self.reel_group_name
|
||||
]
|
||||
|
||||
if not matching_rgroup:
|
||||
reel_group = self.fpd.create_reel_group(str(self.reel_group_name))
|
||||
for _r in reel_group.reels:
|
||||
if "reel" not in _r.name.get_value().lower():
|
||||
continue
|
||||
self.log.debug("Removing: {}".format(_r.name))
|
||||
flame.delete(_r)
|
||||
else:
|
||||
reel_group = matching_rgroup.pop()
|
||||
|
||||
matching_reel = [
|
||||
re for re in reel_group.reels
|
||||
if re.name.get_value() == self.reel_name
|
||||
]
|
||||
|
||||
if not matching_reel:
|
||||
reel_group = reel_group.create_reel(str(self.reel_name))
|
||||
else:
|
||||
reel_group = matching_reel.pop()
|
||||
|
||||
return reel_group
|
||||
|
||||
def _get_segment_from_clip(self, clip):
|
||||
# unwrapping segment from input clip
|
||||
pass
|
||||
|
||||
# def switch(self, container, representation):
|
||||
# self.update(container, representation)
|
||||
|
||||
# def update(self, container, representation):
|
||||
# """ Updating previously loaded clips
|
||||
# """
|
||||
|
||||
# # load clip to timeline and get main variables
|
||||
# name = container['name']
|
||||
# namespace = container['namespace']
|
||||
# track_item = phiero.get_track_items(
|
||||
# track_item_name=namespace)
|
||||
# version = io.find_one({
|
||||
# "type": "version",
|
||||
# "_id": representation["parent"]
|
||||
# })
|
||||
# version_data = version.get("data", {})
|
||||
# version_name = version.get("name", None)
|
||||
# colorspace = version_data.get("colorspace", None)
|
||||
# object_name = "{}_{}".format(name, namespace)
|
||||
# file = api.get_representation_path(representation).replace("\\", "/")
|
||||
# clip = track_item.source()
|
||||
|
||||
# # reconnect media to new path
|
||||
# clip.reconnectMedia(file)
|
||||
|
||||
# # set colorspace
|
||||
# if colorspace:
|
||||
# clip.setSourceMediaColourTransform(colorspace)
|
||||
|
||||
# # add additional metadata from the version to imprint Avalon knob
|
||||
# add_keys = [
|
||||
# "frameStart", "frameEnd", "source", "author",
|
||||
# "fps", "handleStart", "handleEnd"
|
||||
# ]
|
||||
|
||||
# # move all version data keys to tag data
|
||||
# data_imprint = {}
|
||||
# for key in add_keys:
|
||||
# data_imprint.update({
|
||||
# key: version_data.get(key, str(None))
|
||||
# })
|
||||
|
||||
# # add variables related to version context
|
||||
# data_imprint.update({
|
||||
# "representation": str(representation["_id"]),
|
||||
# "version": version_name,
|
||||
# "colorspace": colorspace,
|
||||
# "objectName": object_name
|
||||
# })
|
||||
|
||||
# # update color of clip regarding the version order
|
||||
# self.set_item_color(track_item, version)
|
||||
|
||||
# return phiero.update_container(track_item, data_imprint)
|
||||
|
||||
# def remove(self, container):
|
||||
# """ Removing previously loaded clips
|
||||
# """
|
||||
# # load clip to timeline and get main variables
|
||||
# namespace = container['namespace']
|
||||
# track_item = phiero.get_track_items(
|
||||
# track_item_name=namespace)
|
||||
# track = track_item.parent()
|
||||
|
||||
# # remove track item from track
|
||||
# track.removeItem(track_item)
|
||||
|
||||
# @classmethod
|
||||
# def multiselection(cls, track_item):
|
||||
# if not cls.track:
|
||||
# cls.track = track_item.parent()
|
||||
# cls.sequence = cls.track.parent()
|
||||
|
||||
# @classmethod
|
||||
# def set_item_color(cls, track_item, version):
|
||||
|
||||
# clip = track_item.source()
|
||||
# # define version name
|
||||
# version_name = version.get("name", None)
|
||||
# # get all versions in list
|
||||
# versions = io.find({
|
||||
# "type": "version",
|
||||
# "parent": version["parent"]
|
||||
# }).distinct('name')
|
||||
|
||||
# max_version = max(versions)
|
||||
|
||||
# # set clip colour
|
||||
# if version_name == max_version:
|
||||
# clip.binItem().setColor(cls.clip_color_last)
|
||||
# else:
|
||||
# clip.binItem().setColor(cls.clip_color)
|
||||
|
|
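A small illustration of how the loader's `clip_name_template` above resolves, assuming a typical representation context; the values are hypothetical:

    clip_name_template = "{asset}_{subset}_{representation}"

    representation_context = {   # hypothetical context taken from the representation
        "asset": "sh010",
        "subset": "plateMain",
        "representation": "exr",
    }

    print(clip_name_template.format(**representation_context))
    # -> sh010_plateMain_exr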
@ -22,6 +22,7 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
"ext": "jpg",
|
||||
"xml_preset_file": "Jpeg (8-bit).xml",
|
||||
"xml_preset_dir": "",
|
||||
"colorspace_out": "Output - sRGB",
|
||||
"representation_add_range": False,
|
||||
"representation_tags": ["thumbnail"]
|
||||
},
|
||||
|
|
@ -29,6 +30,7 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
"ext": "mov",
|
||||
"xml_preset_file": "Apple iPad (1920x1080).xml",
|
||||
"xml_preset_dir": "",
|
||||
"colorspace_out": "Output - Rec.709",
|
||||
"representation_add_range": True,
|
||||
"representation_tags": [
|
||||
"review",
|
||||
|
|
@ -45,7 +47,6 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
export_presets_mapping = {}
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
if (
|
||||
self.keep_original_representation
|
||||
and "representations" not in instance.data
|
||||
|
|
@ -71,7 +72,7 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
staging_dir = self.staging_dir(instance)
|
||||
|
||||
# add default preset type for thumbnail and reviewable video
|
||||
# update them with settings and overide in case the same
|
||||
# update them with settings and override in case the same
|
||||
# are found in there
|
||||
export_presets = deepcopy(self.default_presets)
|
||||
export_presets.update(self.export_presets_mapping)
|
||||
|
|
@ -84,6 +85,7 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
preset_file = preset_config["xml_preset_file"]
|
||||
preset_dir = preset_config["xml_preset_dir"]
|
||||
repre_tags = preset_config["representation_tags"]
|
||||
color_out = preset_config["colorspace_out"]
|
||||
|
||||
# validate xml preset file is filled
|
||||
if preset_file == "":
|
||||
|
|
@ -129,17 +131,31 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
opfapi.export_clip(
|
||||
export_dir_path, duplclip, preset_path, **kwargs)
|
||||
|
||||
extension = preset_config["ext"]
|
||||
# create representation data
|
||||
representation_data = {
|
||||
"name": unique_name,
|
||||
"outputName": unique_name,
|
||||
"ext": preset_config["ext"],
|
||||
"ext": extension,
|
||||
"stagingDir": export_dir_path,
|
||||
"tags": repre_tags
|
||||
"tags": repre_tags,
|
||||
"data": {
|
||||
"colorspace": color_out
|
||||
}
|
||||
}
|
||||
|
||||
# collect all available content of export dir
|
||||
files = os.listdir(export_dir_path)
|
||||
|
||||
# make sure no nested folders inside
|
||||
n_stage_dir, n_files = self._unfolds_nested_folders(
|
||||
export_dir_path, files, extension)
|
||||
|
||||
# fix representation in case of nested folders
|
||||
if n_stage_dir:
|
||||
representation_data["stagingDir"] = n_stage_dir
|
||||
files = n_files
|
||||
|
||||
# add files to representation, but add
# an image sequence as a list
|
||||
if (
|
||||
|
|
@ -170,3 +186,63 @@ class ExtractSubsetResources(openpype.api.Extractor):
|
|||
|
||||
self.log.debug("All representations: {}".format(
|
||||
pformat(instance.data["representations"])))
|
||||
|
||||
def _unfolds_nested_folders(self, stage_dir, files_list, ext):
|
||||
"""Unfolds nested folders
|
||||
|
||||
Args:
|
||||
stage_dir (str): path string with directory
|
||||
files_list (list): list of file names
|
||||
ext (str): extension (jpg)[without dot]
|
||||
|
||||
Raises:
|
||||
IOError: in case no files were collected from any directory
|
||||
|
||||
Returns:
|
||||
str, list: new staging dir path, new list of file names
|
||||
or
|
||||
None, None: In case single file in `files_list`
|
||||
"""
|
||||
# exclude single files which have the same extension
# as the input ext attr
|
||||
if (
|
||||
# only one file in list
|
||||
len(files_list) == 1
|
||||
# file is having extension as input
|
||||
and ext in os.path.splitext(files_list[0])[-1]
|
||||
):
|
||||
return None, None
|
||||
elif (
|
||||
# more than one file in list
|
||||
len(files_list) >= 1
|
||||
# extension is correct
|
||||
and ext in os.path.splitext(files_list[0])[-1]
|
||||
# test file exists
|
||||
and os.path.exists(
|
||||
os.path.join(stage_dir, files_list[0])
|
||||
)
|
||||
):
|
||||
return None, None
|
||||
|
||||
new_stage_dir = None
|
||||
new_files_list = []
|
||||
for file in files_list:
|
||||
search_path = os.path.join(stage_dir, file)
|
||||
if not os.path.isdir(search_path):
|
||||
continue
|
||||
for root, _dirs, files in os.walk(search_path):
|
||||
for _file in files:
|
||||
_fn, _ext = os.path.splitext(_file)
|
||||
if ext.lower() != _ext[1:].lower():
|
||||
continue
|
||||
new_files_list.append(_file)
|
||||
if not new_stage_dir:
|
||||
new_stage_dir = root
|
||||
|
||||
if not new_stage_dir:
|
||||
raise AssertionError(
|
||||
"Files in `{}` are not correct! Check `{}`".format(
|
||||
files_list, stage_dir)
|
||||
)
|
||||
|
||||
return new_stage_dir, new_files_list
|
||||
|
|
|
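A hedged sketch of how `_unfolds_nested_folders` behaves, with an illustrative staging layout; `extractor` stands for an `ExtractSubsetResources` instance:

    # the staging dir contains a nested folder of frames, e.g.
    #   /tmp/stage/thumbnail/thumbnail.0001.jpg ... thumbnail.0100.jpg
    new_dir, new_files = extractor._unfolds_nested_folders(
        "/tmp/stage", ["thumbnail"], "jpg")
    # -> new_dir == "/tmp/stage/thumbnail", new_files == ["thumbnail.0001.jpg", ...]

    # a flat single file (or a flat sequence with the right extension) returns
    # (None, None) and the original staging dir is kept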
|||
24
openpype/hosts/flame/plugins/publish/validate_source_clip.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
import pyblish
|
||||
|
||||
|
||||
@pyblish.api.log
|
||||
class ValidateSourceClip(pyblish.api.InstancePlugin):
|
||||
"""Validate instance is not having empty `flameSourceClip`"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Source Clip"
|
||||
hosts = ["flame"]
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, instance):
|
||||
flame_source_clip = instance.data["flameSourceClip"]
|
||||
|
||||
self.log.debug("_ flame_source_clip: {}".format(flame_source_clip))
|
||||
|
||||
if flame_source_clip is None:
|
||||
raise AttributeError((
|
||||
"Timeline segment `{}` is not having "
|
||||
"relative clip in reels. Please make sure "
|
||||
"you push `Save Sources` button in Conform Tab").format(
|
||||
instance.data["asset"]
|
||||
))
|
||||
|
|
@ -29,7 +29,7 @@
|
|||
<fileType>Jpeg</fileType>
|
||||
<codec>923688</codec>
|
||||
<codecProfile></codecProfile>
|
||||
<namePattern><segment name></namePattern>
|
||||
<namePattern><shot name></namePattern>
|
||||
<compressionQuality>100</compressionQuality>
|
||||
<transferCharacteristic>2</transferCharacteristic>
|
||||
<colorimetricSpecification>4</colorimetricSpecification>
|
||||
|
|
@ -27,7 +27,7 @@
|
|||
</sequence>
|
||||
<movie>
|
||||
<fileType>QuickTime</fileType>
|
||||
<namePattern><segment name></namePattern>
|
||||
<namePattern><shot name></namePattern>
|
||||
<yuvHeadroom>0</yuvHeadroom>
|
||||
<yuvColourSpace>PCS_709</yuvColourSpace>
|
||||
<operationalPattern>None</operationalPattern>
|
||||
|
|
@ -43,7 +43,7 @@
|
|||
<targetVersion>2021</targetVersion>
|
||||
<pathSuffix>/profiles/.33622016/HDTV_720p_8Mbits.cdxprof</pathSuffix>
|
||||
</codecProfile>
|
||||
<namePattern><segment name>_<video codec></namePattern>
|
||||
<namePattern><shot name>_<video codec></namePattern>
|
||||
<compressionQuality>50</compressionQuality>
|
||||
<transferCharacteristic>2</transferCharacteristic>
|
||||
<colorimetricSpecification>4</colorimetricSpecification>
|
||||
|
|
@ -8,7 +8,7 @@ PLUGIN_DIR = os.path.dirname(os.path.dirname(__file__))
|
|||
EXPORT_PRESETS_DIR = os.path.join(PLUGIN_DIR, "export_preset")
|
||||
|
||||
CONFIG_DIR = os.path.join(os.path.expanduser(
|
||||
"~/.openpype"), "openpype_flame_to_ftrack")
|
||||
"~/.openpype"), "openpype_babypublisher")
|
||||
|
||||
|
||||
@contextmanager
|
||||
|
|
@ -360,6 +360,8 @@ class FtrackComponentCreator:
|
|||
|
||||
|
||||
class FtrackEntityOperator:
|
||||
existing_tasks = []
|
||||
|
||||
def __init__(self, session, project_entity):
|
||||
self.session = session
|
||||
self.project_entity = project_entity
|
||||
|
|
@ -392,10 +394,7 @@ class FtrackEntityOperator:
|
|||
query = '{} where name is "{}" and project_id is "{}"'.format(
|
||||
type, name, self.project_entity["id"])
|
||||
|
||||
try:
|
||||
entity = session.query(query).one()
|
||||
except Exception:
|
||||
entity = None
|
||||
entity = session.query(query).first()
|
||||
|
||||
# if entity doesn't exist then create one
|
||||
if not entity:
|
||||
|
|
@ -430,10 +429,21 @@ class FtrackEntityOperator:
|
|||
return parents
|
||||
|
||||
def create_task(self, task_type, task_types, parent):
|
||||
existing_task = [
|
||||
_exising_tasks = [
|
||||
child for child in parent['children']
|
||||
if child.entity_type.lower() == 'task'
|
||||
if child['name'].lower() in task_type.lower()
|
||||
]
|
||||
|
||||
# add task into existing tasks if they are not already there
|
||||
for _t in _exising_tasks:
|
||||
if _t in self.existing_tasks:
|
||||
continue
|
||||
self.existing_tasks.append(_t)
|
||||
|
||||
existing_task = [
|
||||
task for task in self.existing_tasks
|
||||
if task['name'].lower() in task_type.lower()
|
||||
if task['parent'] == parent
|
||||
]
|
||||
|
||||
if existing_task:
|
||||
|
|
@ -445,4 +455,5 @@ class FtrackEntityOperator:
|
|||
})
|
||||
task["type"] = task_types[task_type]
|
||||
|
||||
self.existing_tasks.append(task)
|
||||
return task
|
||||
|
|
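Context for the query change above: in ftrack_api, a query's `.one()` raises when there is no match (or more than one), while `.first()` returns None for an empty result, which is exactly what the follow-up `if not entity:` check expects. A minimal, hypothetical sketch of that pattern (session setup and the entity values are assumptions, not from this commit):

import ftrack_api

# credentials are read from FTRACK_SERVER / FTRACK_API_USER / FTRACK_API_KEY
session = ftrack_api.Session()

query = 'Shot where name is "sh010" and project_id is "{}"'.format(
    "hypothetical-project-id")

entity = session.query(query).first()  # None instead of an exception
if not entity:
    # real shots usually also need a parent; omitted in this sketch
    entity = session.create("Shot", {"name": "sh010"})
    session.commit()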
@@ -1,4 +1,4 @@
from PySide2 import QtWidgets, QtCore
from Qt import QtWidgets, QtCore

import uiwidgets
import app_utils
@@ -33,11 +33,12 @@ class MainWindow(QtWidgets.QWidget):
        self.panel_class.clear_temp_data()
        self.panel_class.close()
        clear_inner_modules()
        ftrack_lib.FtrackEntityOperator.existing_tasks = []
        # now the panel can be closed
        event.accept()


class FlameToFtrackPanel(object):
class FlameBabyPublisherPanel(object):
    session = None
    temp_data_dir = None
    processed_components = []
@@ -78,7 +79,7 @@ class FlameToFtrackPanel(object):

        # creating ui
        self.window.setMinimumSize(1500, 600)
        self.window.setWindowTitle('Sequence Shots to Ftrack')
        self.window.setWindowTitle('OpenPype: Baby-publisher')
        self.window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
        self.window.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.window.setFocusPolicy(QtCore.Qt.StrongFocus)
@@ -469,10 +470,14 @@ class FlameToFtrackPanel(object):
        for sequence in self.selection:
            frame_rate = float(str(sequence.frame_rate)[:-4])
            for ver in sequence.versions:
                for tracks in ver.tracks:
                    for segment in tracks.segments:
                for track in ver.tracks:
                    if len(track.segments) == 0 and track.hidden:
                        continue
                    for segment in track.segments:
                        print(segment.attributes)
                        if str(segment.name)[1:-1] == "":
                        if segment.name.get_value() == "":
                            continue
                        if segment.hidden.get_value() is True:
                            continue
                        # get clip frame duration
                        record_duration = str(segment.record_duration)[1:-1]
@@ -492,11 +497,11 @@ class FlameToFtrackPanel(object):

                        # Add timeline segment to tree
                        QtWidgets.QTreeWidgetItem(self.tree, [
                            str(sequence.name)[1:-1],  # seq
                            str(segment.name)[1:-1],  # shot
                            sequence.name.get_value(),  # seq name
                            segment.shot_name.get_value(),  # shot name
                            str(clip_duration),  # clip duration
                            shot_description,  # shot description
                            str(segment.comment)[1:-1]  # task description
                            segment.comment.get_value()  # task description
                        ]).setFlags(
                            QtCore.Qt.ItemIsEditable
                            | QtCore.Qt.ItemIsEnabled
@@ -1,4 +1,4 @@
from PySide2 import QtWidgets, QtCore
from Qt import QtWidgets, QtCore


class FlameLabel(QtWidgets.QLabel):
@@ -3,6 +3,10 @@ from __future__ import print_function
import os
import sys

# only testing dependency for nested modules in package
import six  # noqa


SCRIPT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.join(SCRIPT_DIR, "modules")
sys.path.append(PACKAGE_DIR)
@@ -12,10 +16,11 @@ def flame_panel_executor(selection):
    if "panel_app" in sys.modules.keys():
        print("panel_app module is already loaded")
        del sys.modules["panel_app"]
        import panel_app
        reload(panel_app)  # noqa
        print("panel_app module removed from sys.modules")

    import panel_app
    panel_app.FlameToFtrackPanel(selection)
    panel_app.FlameBabyPublisherPanel(selection)


def scope_sequence(selection):
@@ -26,7 +31,7 @@ def scope_sequence(selection):
def get_media_panel_custom_ui_actions():
    return [
        {
            "name": "OpenPype: Ftrack",
            "name": "OpenPype: Baby-publisher",
            "actions": [
                {
                    "name": "Create Shots",
@@ -1,14 +1,27 @@
from .pipeline import (
    install,
    uninstall
    uninstall,

    ls,

    imprint_container,
    parse_container,

    get_current_comp,
    comp_lock_and_undo_chunk
)

from .utils import (
    setup
from .workio import (
    open_file,
    save_file,
    current_file,
    has_unsaved_changes,
    file_extensions,
    work_root
)


from .lib import (
    maintained_selection,
    get_additional_data,
    update_frame_range
)
@@ -20,11 +33,24 @@ __all__ = [
    # pipeline
    "install",
    "uninstall",
    "ls",

    # utils
    "setup",
    "imprint_container",
    "parse_container",

    "get_current_comp",
    "comp_lock_and_undo_chunk",

    # workio
    "open_file",
    "save_file",
    "current_file",
    "has_unsaved_changes",
    "file_extensions",
    "work_root",

    # lib
    "maintained_selection",
    "get_additional_data",
    "update_frame_range",
@@ -1,8 +1,13 @@
import os
import sys
import re
import contextlib

from Qt import QtGui
import avalon.fusion

import avalon.api
from avalon import io
from .pipeline import get_current_comp, comp_lock_and_undo_chunk

self = sys.modules[__name__]
self._project = None
@@ -24,7 +29,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True):
    """

    if not comp:
        comp = avalon.fusion.get_current_comp()
        comp = get_current_comp()

    attrs = {
        "COMPN_GlobalStart": start,
@@ -37,7 +42,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True):
            "COMPN_RenderEnd": end
        })

    with avalon.fusion.comp_lock_and_undo_chunk(comp):
    with comp_lock_and_undo_chunk(comp):
        comp.SetAttrs(attrs)
@@ -140,3 +145,51 @@ def switch_item(container,
    avalon.api.switch(container, representation)

    return representation


@contextlib.contextmanager
def maintained_selection():
    comp = get_current_comp()
    previous_selection = comp.GetToolList(True).values()
    try:
        yield
    finally:
        flow = comp.CurrentFrame.FlowView
        flow.Select()  # No args equals clearing selection
        if previous_selection:
            for tool in previous_selection:
                flow.Select(tool, True)


def get_frame_path(path):
    """Get filename for the Fusion Saver with padded number as '#'

    >>> get_frame_path("C:/test.exr")
    ('C:/test', 4, '.exr')

    >>> get_frame_path("filename.00.tif")
    ('filename.', 2, '.tif')

    >>> get_frame_path("foobar35.tif")
    ('foobar', 2, '.tif')

    Args:
        path (str): The path to render to.

    Returns:
        tuple: head, padding, tail (extension)

    """
    filename, ext = os.path.splitext(path)

    # Find a final number group
    match = re.match('.*?([0-9]+)$', filename)
    if match:
        padding = len(match.group(1))
        # remove number from end since fusion
        # will swap it with the frame number
        filename = filename[:-padding]
    else:
        padding = 4  # default Fusion padding

    return filename, padding, ext
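A brief usage sketch for the two helpers added to lib.py above; the output path, tool name and undo label are illustrative assumptions only:

from openpype.hosts.fusion.api.lib import get_frame_path, maintained_selection
from openpype.hosts.fusion.api import get_current_comp, comp_lock_and_undo_chunk

# split a Saver output path into head, padding and extension
head, padding, tail = get_frame_path("/renders/sh010/beauty_v001.0001.exr")
# -> ('/renders/sh010/beauty_v001.', 4, '.exr')

comp = get_current_comp()
with comp_lock_and_undo_chunk(comp, "Create example saver"):
    with maintained_selection():
        # adding a tool changes the selection; it is restored on exit
        saver = comp.AddTool("Saver", -32768, -32768)  # magic positions, as in the creator plugin
        saver.SetAttrs({"TOOLS_Name": "exampleSaver"})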
@@ -3,6 +3,7 @@ import sys

from Qt import QtWidgets, QtCore

from openpype import style
from openpype.tools.utils import host_tools

from openpype.hosts.fusion.scripts import (
@@ -54,13 +55,13 @@ class OpenPypeMenu(QtWidgets.QWidget):
        )
        self.render_mode_widget = None
        self.setWindowTitle("OpenPype")
        workfiles_btn = QtWidgets.QPushButton("Workfiles ...", self)
        create_btn = QtWidgets.QPushButton("Create ...", self)
        publish_btn = QtWidgets.QPushButton("Publish ...", self)
        load_btn = QtWidgets.QPushButton("Load ...", self)
        inventory_btn = QtWidgets.QPushButton("Inventory ...", self)
        libload_btn = QtWidgets.QPushButton("Library ...", self)
        rendermode_btn = QtWidgets.QPushButton("Set render mode ...", self)
        workfiles_btn = QtWidgets.QPushButton("Workfiles...", self)
        create_btn = QtWidgets.QPushButton("Create...", self)
        publish_btn = QtWidgets.QPushButton("Publish...", self)
        load_btn = QtWidgets.QPushButton("Load...", self)
        manager_btn = QtWidgets.QPushButton("Manage...", self)
        libload_btn = QtWidgets.QPushButton("Library...", self)
        rendermode_btn = QtWidgets.QPushButton("Set render mode...", self)
        duplicate_with_inputs_btn = QtWidgets.QPushButton(
            "Duplicate with input connections", self
        )
@@ -75,7 +76,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
        layout.addWidget(create_btn)
        layout.addWidget(publish_btn)
        layout.addWidget(load_btn)
        layout.addWidget(inventory_btn)
        layout.addWidget(manager_btn)

        layout.addWidget(Spacer(15, self))

@@ -96,7 +97,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
        create_btn.clicked.connect(self.on_create_clicked)
        publish_btn.clicked.connect(self.on_publish_clicked)
        load_btn.clicked.connect(self.on_load_clicked)
        inventory_btn.clicked.connect(self.on_inventory_clicked)
        manager_btn.clicked.connect(self.on_manager_clicked)
        libload_btn.clicked.connect(self.on_libload_clicked)
        rendermode_btn.clicked.connect(self.on_rendernode_clicked)
        duplicate_with_inputs_btn.clicked.connect(
@@ -119,8 +120,8 @@ class OpenPypeMenu(QtWidgets.QWidget):
        print("Clicked Load")
        host_tools.show_loader(use_context=True)

    def on_inventory_clicked(self):
        print("Clicked Inventory")
    def on_manager_clicked(self):
        print("Clicked Manager")
        host_tools.show_scene_inventory()

    def on_libload_clicked(self):
@@ -128,7 +129,6 @@ class OpenPypeMenu(QtWidgets.QWidget):
        host_tools.show_library_loader()

    def on_rendernode_clicked(self):
        from avalon import style
        print("Clicked Set Render Mode")
        if self.render_mode_widget is None:
            window = set_rendermode.SetRenderMode()
@@ -2,10 +2,16 @@
Basic avalon integration
"""
import os
import sys
import logging
import contextlib

import pyblish.api
import avalon.api
from avalon.pipeline import AVALON_CONTAINER_ID

from avalon import api as avalon
from pyblish import api as pyblish
from openpype.api import Logger
from openpype.pipeline import LegacyCreator
import openpype.hosts.fusion

log = Logger().get_logger(__name__)
@@ -19,6 +25,14 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


class CompLogHandler(logging.Handler):
    def emit(self, record):
        entry = self.format(record)
        comp = get_current_comp()
        if comp:
            comp.Print(entry)


def install():
    """Install fusion-specific functionality of avalon-core.

@@ -30,25 +44,32 @@ def install():
    See the Maya equivalent for inspiration on how to implement this.

    """
    # Remove all handlers associated with the root logger object, because
    # that one sometimes logs as "warnings" incorrectly.
    for handler in logging.root.handlers[:]:
        logging.root.removeHandler(handler)

    # Disable all families except for the ones we explicitly want to see
    family_states = ["imagesequence",
                     "camera",
                     "pointcache"]
    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states
    # Attach default logging handler that prints to active comp
    logger = logging.getLogger()
    formatter = logging.Formatter(fmt="%(message)s\n")
    handler = CompLogHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    log.info("openpype.hosts.fusion installed")

    pyblish.register_host("fusion")
    pyblish.register_plugin_path(PUBLISH_PATH)
    pyblish.api.register_host("fusion")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    log.info("Registering Fusion plug-ins..")

    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
    avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
    avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH)

    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
    pyblish.api.register_callback(
        "instanceToggled", on_pyblish_instance_toggled
    )


def uninstall():
@@ -62,22 +83,23 @@ def uninstall():
    modifying the menu or registered families.

    """
    pyblish.deregister_host("fusion")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    pyblish.api.deregister_host("fusion")
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    log.info("Deregistering Fusion plug-ins..")

    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
    avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
    avalon.api.deregister_plugin_path(
        avalon.api.InventoryAction, INVENTORY_PATH
    )

    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
    pyblish.api.deregister_callback(
        "instanceToggled", on_pyblish_instance_toggled
    )


def on_pyblish_instance_toggled(instance, new_value, old_value):
    """Toggle saver tool passthrough states on instance toggles."""

    from avalon.fusion import comp_lock_and_undo_chunk

    comp = instance.context.data.get("currentComp")
    if not comp:
        return
@@ -97,3 +119,106 @@ def on_pyblish_instance_toggled(instance, new_value, old_value):
        current = attrs["TOOLB_PassThrough"]
        if current != passthrough:
            tool.SetAttrs({"TOOLB_PassThrough": passthrough})


def ls():
    """List containers from active Fusion scene

    This is the host-equivalent of api.ls(), but instead of listing
    assets on disk, it lists assets already loaded in Fusion; once loaded
    they are called 'containers'

    Yields:
        dict: container

    """

    comp = get_current_comp()
    tools = comp.GetToolList(False, "Loader").values()

    for tool in tools:
        container = parse_container(tool)
        if container:
            yield container


def imprint_container(tool,
                      name,
                      namespace,
                      context,
                      loader=None):
    """Imprint a Loader with metadata

    Containerisation enables a tracking of version, author and origin
    for loaded assets.

    Arguments:
        tool (object): The node in Fusion to imprint as container, usually a
            Loader.
        name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
        context (dict): Asset information
        loader (str, optional): Name of loader used to produce this container.

    Returns:
        None

    """

    data = [
        ("schema", "openpype:container-2.0"),
        ("id", AVALON_CONTAINER_ID),
        ("name", str(name)),
        ("namespace", str(namespace)),
        ("loader", str(loader)),
        ("representation", str(context["representation"]["_id"])),
    ]

    for key, value in data:
        tool.SetData("avalon.{}".format(key), value)


def parse_container(tool):
    """Returns imprinted container data of a tool

    This reads the imprinted data from `imprint_container`.

    """

    data = tool.GetData('avalon')
    if not isinstance(data, dict):
        return

    # If not all required data return the empty container
    required = ['schema', 'id', 'name',
                'namespace', 'loader', 'representation']
    if not all(key in data for key in required):
        return

    container = {key: data[key] for key in required}

    # Store the tool's name
    container["objectName"] = tool.Name

    # Store reference to the tool object
    container["_tool"] = tool

    return container


def get_current_comp():
    """Hack to get current comp in this session"""
    fusion = getattr(sys.modules["__main__"], "fusion", None)
    return fusion.CurrentComp if fusion else None


@contextlib.contextmanager
def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"):
    """Lock comp and open an undo chunk during the context"""
    try:
        comp.Lock()
        comp.StartUndo(undo_queue_name)
        yield
    finally:
        comp.Unlock()
        comp.EndUndo()
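The container helpers added above form a small round-trip API: a loader imprints metadata on a Loader tool, ls() walks all Loader tools in the comp and parse_container() turns the imprinted data back into a dict. A minimal, hypothetical loader-side sketch of that round trip (the context dict shape is abbreviated and the names are placeholders):

from openpype.hosts.fusion.api import (
    get_current_comp,
    comp_lock_and_undo_chunk,
    imprint_container,
    ls,
)

comp = get_current_comp()
context = {"representation": {"_id": "615f00000000000000000000"}}  # abbreviated

with comp_lock_and_undo_chunk(comp, "Load example"):
    loader_tool = comp.AddTool("Loader", -32768, -32768)
    imprint_container(
        loader_tool,
        name="exampleMain",
        namespace="sh010_example",
        context=context,
        loader="ExampleLoader",
    )

# every imprinted Loader now shows up as a container
for container in ls():
    print(container["name"], container["representation"])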
@@ -1,86 +0,0 @@
#! python3

"""
Fusion tools for setting environment
"""

import os
import shutil

from openpype.api import Logger
import openpype.hosts.fusion

log = Logger().get_logger(__name__)


def _sync_utility_scripts(env=None):
    """ Synchronizing basic utlility scripts for resolve.

    To be able to run scripts from inside `Fusion/Workspace/Scripts` menu
    all scripts has to be accessible from defined folder.
    """
    if not env:
        env = os.environ

    # initiate inputs
    scripts = {}
    us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR")
    us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
    us_paths = [os.path.join(
        os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)),
        "utility_scripts"
    )]

    # collect script dirs
    if us_env:
        log.info(f"Utility Scripts Env: `{us_env}`")
        us_paths = us_env.split(
            os.pathsep) + us_paths

    # collect scripts from dirs
    for path in us_paths:
        scripts.update({path: os.listdir(path)})

    log.info(f"Utility Scripts Dir: `{us_paths}`")
    log.info(f"Utility Scripts: `{scripts}`")

    # make sure no script file is in folder
    if next((s for s in os.listdir(us_dir)), None):
        for s in os.listdir(us_dir):
            path = os.path.normpath(
                os.path.join(us_dir, s))
            log.info(f"Removing `{path}`...")

            # remove file or directory if not in our folders
            if not os.path.isdir(path):
                os.remove(path)
            else:
                shutil.rmtree(path)

    # copy scripts into Resolve's utility scripts dir
    for d, sl in scripts.items():
        # directory and scripts list
        for s in sl:
            # script in script list
            src = os.path.normpath(os.path.join(d, s))
            dst = os.path.normpath(os.path.join(us_dir, s))

            log.info(f"Copying `{src}` to `{dst}`...")

            # copy file or directory from our folders to fusion's folder
            if not os.path.isdir(src):
                shutil.copy2(src, dst)
            else:
                shutil.copytree(src, dst)


def setup(env=None):
    """ Wrapper installer started from pype.hooks.fusion.FusionPrelaunch()
    """
    if not env:
        env = os.environ

    # synchronize resolve utility scripts
    _sync_utility_scripts(env)

    log.info("Fusion Pype wrapper has been installed")
45
openpype/hosts/fusion/api/workio.py
Normal file
@@ -0,0 +1,45 @@
"""Host API required Work Files tool"""
import sys
import os
from avalon import api
from .pipeline import get_current_comp


def file_extensions():
    return api.HOST_WORKFILE_EXTENSIONS["fusion"]


def has_unsaved_changes():
    comp = get_current_comp()
    return comp.GetAttrs()["COMPB_Modified"]


def save_file(filepath):
    comp = get_current_comp()
    comp.Save(filepath)


def open_file(filepath):
    # Hack to get fusion, see
    # openpype.hosts.fusion.api.pipeline.get_current_comp()
    fusion = getattr(sys.modules["__main__"], "fusion", None)

    return fusion.LoadComp(filepath)


def current_file():
    comp = get_current_comp()
    current_filepath = comp.GetAttrs()["COMPS_FileName"]
    if not current_filepath:
        return None

    return current_filepath


def work_root(session):
    work_dir = session["AVALON_WORKDIR"]
    scene_dir = session.get("AVALON_SCENEDIR")
    if scene_dir:
        return os.path.join(work_dir, scene_dir)
    else:
        return work_dir
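The new workio module is the thin Fusion backend for the Workfiles tool. A small, hypothetical sketch of how these functions chain together (the session values and workfile name are placeholders):

import os
from openpype.hosts.fusion.api import (
    work_root,
    current_file,
    has_unsaved_changes,
    save_file,
    file_extensions,
)

session = {
    "AVALON_WORKDIR": "/proj/sh010/work/comp",  # placeholder
    "AVALON_SCENEDIR": "scenes",                # optional sub folder
}

root = work_root(session)           # -> /proj/sh010/work/comp/scenes
ext = file_extensions()[0]          # e.g. ".comp"
print("current comp:", current_file())

if has_unsaved_changes():
    save_file(os.path.join(root, "sh010_comp_v001" + ext))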
@@ -1,7 +1,8 @@
import os
import importlib
import shutil

import openpype.hosts.fusion
from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
from openpype.hosts.fusion.api import utils


class FusionPrelaunch(PreLaunchHook):
@@ -13,40 +14,101 @@ class FusionPrelaunch(PreLaunchHook):

    def execute(self):
        # making sure python 3.6 is installed at provided path
        py36_dir = os.path.normpath(self.launch_context.env.get("PYTHON36", ""))
        py36_dir = self.launch_context.env.get("PYTHON36")
        if not py36_dir:
            raise ApplicationLaunchFailed(
                "Required environment variable \"PYTHON36\" is not set."
                "\n\nFusion implementation requires to have"
                " installed Python 3.6"
            )

        py36_dir = os.path.normpath(py36_dir)
        if not os.path.isdir(py36_dir):
            raise ApplicationLaunchFailed(
                "Python 3.6 is not installed at the provided path.\n"
                "Either make sure the 'environments/fusion.json' has "
                "'PYTHON36' set corectly or make sure Python 3.6 is installed "
                f"in the given path.\n\nPYTHON36: {py36_dir}"
                "Either make sure the environments in fusion settings has"
                " 'PYTHON36' set corectly or make sure Python 3.6 is installed"
                f" in the given path.\n\nPYTHON36: {py36_dir}"
            )
        self.log.info(f"Path to Fusion Python folder: '{py36_dir}'...")
        self.launch_context.env["PYTHON36"] = py36_dir

        utility_dir = self.launch_context.env.get("FUSION_UTILITY_SCRIPTS_DIR")
        if not utility_dir:
            raise ApplicationLaunchFailed(
                "Required Fusion utility script dir environment variable"
                " \"FUSION_UTILITY_SCRIPTS_DIR\" is not set."
            )

        # setting utility scripts dir for scripts syncing
        us_dir = os.path.normpath(
            self.launch_context.env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
        )
        if not os.path.isdir(us_dir):
        utility_dir = os.path.normpath(utility_dir)
        if not os.path.isdir(utility_dir):
            raise ApplicationLaunchFailed(
                "Fusion utility script dir does not exist. Either make sure "
                "the 'environments/fusion.json' has "
                "'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall "
                f"Fusion.\n\nFUSION_UTILITY_SCRIPTS_DIR: '{us_dir}'"
                "the environments in fusion settings has"
                " 'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall "
                f"Fusion.\n\nFUSION_UTILITY_SCRIPTS_DIR: '{utility_dir}'"
            )

        try:
            __import__("avalon.fusion")
            __import__("pyblish")
        self._sync_utility_scripts(self.launch_context.env)
        self.log.info("Fusion Pype wrapper has been installed")

        except ImportError:
            self.log.warning(
                "pyblish: Could not load Fusion integration.",
                exc_info=True
            )
    def _sync_utility_scripts(self, env):
        """ Synchronizing basic utlility scripts for resolve.

        else:
            # Resolve Setup integration
            importlib.reload(utils)
            utils.setup(self.launch_context.env)
        To be able to run scripts from inside `Fusion/Workspace/Scripts` menu
        all scripts has to be accessible from defined folder.
        """
        if not env:
            env = {k: v for k, v in os.environ.items()}

        # initiate inputs
        scripts = {}
        us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR")
        us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
        us_paths = [os.path.join(
            os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)),
            "utility_scripts"
        )]

        # collect script dirs
        if us_env:
            self.log.info(f"Utility Scripts Env: `{us_env}`")
            us_paths = us_env.split(
                os.pathsep) + us_paths

        # collect scripts from dirs
        for path in us_paths:
            scripts.update({path: os.listdir(path)})

        self.log.info(f"Utility Scripts Dir: `{us_paths}`")
        self.log.info(f"Utility Scripts: `{scripts}`")

        # make sure no script file is in folder
        if next((s for s in os.listdir(us_dir)), None):
            for s in os.listdir(us_dir):
                path = os.path.normpath(
                    os.path.join(us_dir, s))
                self.log.info(f"Removing `{path}`...")

                # remove file or directory if not in our folders
                if not os.path.isdir(path):
                    os.remove(path)
                else:
                    shutil.rmtree(path)

        # copy scripts into Resolve's utility scripts dir
        for d, sl in scripts.items():
            # directory and scripts list
            for s in sl:
                # script in script list
                src = os.path.normpath(os.path.join(d, s))
                dst = os.path.normpath(os.path.join(us_dir, s))

                self.log.info(f"Copying `{src}` to `{dst}`...")

                # copy file or directory from our folders to fusion's folder
                if not os.path.isdir(src):
                    shutil.copy2(src, dst)
                else:
                    shutil.copytree(src, dst)
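For orientation, the hook above follows the generic openpype PreLaunchHook contract: a subclass implements execute() and reads or mutates self.launch_context.env before the host process starts, raising ApplicationLaunchFailed to abort the launch. A minimal, hypothetical hook illustrating that contract (the environment variable name is made up for this sketch):

import os

from openpype.lib import PreLaunchHook, ApplicationLaunchFailed


class ExampleEnvCheck(PreLaunchHook):
    """Abort launch when a required (hypothetical) directory is missing."""

    def execute(self):
        example_dir = self.launch_context.env.get("EXAMPLE_TOOLS_DIR")
        if not example_dir or not os.path.isdir(example_dir):
            raise ApplicationLaunchFailed(
                "Required environment variable \"EXAMPLE_TOOLS_DIR\" is not"
                " set or does not point to an existing directory."
            )

        self.log.info(f"Example tools dir: '{example_dir}'")
        self.launch_context.env["EXAMPLE_TOOLS_DIR"] = os.path.normpath(
            example_dir)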
@@ -1,21 +1,25 @@
import os

import openpype.api
from avalon import fusion
from openpype.pipeline import create
from openpype.hosts.fusion.api import (
    get_current_comp,
    comp_lock_and_undo_chunk
)


class CreateOpenEXRSaver(openpype.api.Creator):
class CreateOpenEXRSaver(create.LegacyCreator):

    name = "openexrDefault"
    label = "Create OpenEXR Saver"
    hosts = ["fusion"]
    family = "render"
    defaults = ["Main"]

    def process(self):

        file_format = "OpenEXRFormat"

        comp = fusion.get_current_comp()
        comp = get_current_comp()

        # todo: improve method of getting current environment
        # todo: pref avalon.Session over os.environ
@@ -25,7 +29,7 @@ class CreateOpenEXRSaver(openpype.api.Creator):
        filename = "{}..tiff".format(self.name)
        filepath = os.path.join(workdir, "render", filename)

        with fusion.comp_lock_and_undo_chunk(comp):
        with comp_lock_and_undo_chunk(comp):
            args = (-32768, -32768)  # Magical position numbers
            saver = comp.AddTool("Saver", *args)
            saver.SetAttrs({"TOOLS_Name": self.name})
@@ -8,15 +8,17 @@ class FusionSelectContainers(api.InventoryAction):
    color = "#d8d8d8"

    def process(self, containers):

        import avalon.fusion
        from openpype.hosts.fusion.api import (
            get_current_comp,
            comp_lock_and_undo_chunk
        )

        tools = [i["_tool"] for i in containers]

        comp = avalon.fusion.get_current_comp()
        comp = get_current_comp()
        flow = comp.CurrentFrame.FlowView

        with avalon.fusion.comp_lock_and_undo_chunk(comp, self.label):
        with comp_lock_and_undo_chunk(comp, self.label):
            # Clear selection
            flow.Select()

@@ -1,7 +1,11 @@
from avalon import api, style
from avalon import api
from Qt import QtGui, QtWidgets

import avalon.fusion
from openpype import style
from openpype.hosts.fusion.api import (
    get_current_comp,
    comp_lock_and_undo_chunk
)


class FusionSetToolColor(api.InventoryAction):
@@ -16,7 +20,7 @@ class FusionSetToolColor(api.InventoryAction):
        """Color all selected tools the selected colors"""

        result = []
        comp = avalon.fusion.get_current_comp()
        comp = get_current_comp()

        # Get tool color
        first = containers[0]
@@ -33,7 +37,7 @@ class FusionSetToolColor(api.InventoryAction):
        if not picked_color:
            return

        with avalon.fusion.comp_lock_and_undo_chunk(comp):
        with comp_lock_and_undo_chunk(comp):
            for container in containers:
                # Convert color to RGB 0-1 floats
                rgb_f = picked_color.getRgbF()