Merge remote-tracking branch 'origin/develop' into enhancement/OP-3075_houdini-new-publisher
Commit d170cdfaff
1404 changed files with 179193 additions and 26676 deletions
326 .all-contributorsrc (new file)

@@ -0,0 +1,326 @@
{
  "projectName": "OpenPype",
  "projectOwner": "pypeclub",
  "repoType": "github",
  "repoHost": "https://github.com",
  "files": ["README.md"],
  "imageSize": 100,
  "commit": true,
  "commitConvention": "none",
  "contributors": [
    {"login": "mkolar", "name": "Milan Kolar", "avatar_url": "https://avatars.githubusercontent.com/u/3333008?v=4", "profile": "http://pype.club/", "contributions": ["code", "doc", "infra", "business", "content", "fundingFinding", "maintenance", "projectManagement", "review", "mentoring", "question"]},
    {"login": "jakubjezek001", "name": "Jakub Ježek", "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", "profile": "https://www.linkedin.com/in/jakubjezek79", "contributions": ["code", "doc", "infra", "content", "review", "maintenance", "mentoring", "projectManagement", "question"]},
    {"login": "antirotor", "name": "Ondřej Samohel", "avatar_url": "https://avatars.githubusercontent.com/u/33513211?v=4", "profile": "https://github.com/antirotor", "contributions": ["code", "doc", "infra", "content", "review", "maintenance", "mentoring", "projectManagement", "question"]},
    {"login": "iLLiCiTiT", "name": "Jakub Trllo", "avatar_url": "https://avatars.githubusercontent.com/u/43494761?v=4", "profile": "https://github.com/iLLiCiTiT", "contributions": ["code", "doc", "infra", "review", "maintenance", "question"]},
    {"login": "kalisp", "name": "Petr Kalis", "avatar_url": "https://avatars.githubusercontent.com/u/4457962?v=4", "profile": "https://github.com/kalisp", "contributions": ["code", "doc", "infra", "review", "maintenance", "question"]},
    {"login": "64qam", "name": "64qam", "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", "profile": "https://github.com/64qam", "contributions": ["code", "review", "doc", "infra", "projectManagement", "maintenance", "content", "userTesting"]},
    {"login": "BigRoy", "name": "Roy Nieterau", "avatar_url": "https://avatars.githubusercontent.com/u/2439881?v=4", "profile": "http://www.colorbleed.nl/", "contributions": ["code", "doc", "review", "mentoring", "question"]},
    {"login": "tokejepsen", "name": "Toke Jepsen", "avatar_url": "https://avatars.githubusercontent.com/u/1860085?v=4", "profile": "https://github.com/tokejepsen", "contributions": ["code", "doc", "review", "mentoring", "question"]},
    {"login": "jrsndl", "name": "Jiri Sindelar", "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", "profile": "https://github.com/jrsndl", "contributions": ["code", "review", "doc", "content", "tutorial", "userTesting"]},
    {"login": "simonebarbieri", "name": "Simone Barbieri", "avatar_url": "https://avatars.githubusercontent.com/u/1087869?v=4", "profile": "https://barbierisimone.com/", "contributions": ["code", "doc"]},
    {"login": "karimmozilla", "name": "karimmozilla", "avatar_url": "https://avatars.githubusercontent.com/u/82811760?v=4", "profile": "http://karimmozilla.xyz/", "contributions": ["code"]},
    {"login": "Allan-I", "name": "Allan I. A.", "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", "profile": "https://github.com/Allan-I", "contributions": ["code"]},
    {"login": "m-u-r-p-h-y", "name": "murphy", "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", "contributions": ["code", "review", "userTesting", "doc", "projectManagement"]},
    {"login": "aardschok", "name": "Wijnand Koreman", "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", "profile": "https://github.com/aardschok", "contributions": ["code"]},
    {"login": "zhoub", "name": "Bo Zhou", "avatar_url": "https://avatars.githubusercontent.com/u/1798206?v=4", "profile": "http://jedimaster.cnblogs.com/", "contributions": ["code"]},
    {"login": "ClementHector", "name": "Clément Hector", "avatar_url": "https://avatars.githubusercontent.com/u/7068597?v=4", "profile": "https://www.linkedin.com/in/clementhector/", "contributions": ["code", "review"]},
    {"login": "davidlatwe", "name": "David Lai", "avatar_url": "https://avatars.githubusercontent.com/u/3357009?v=4", "profile": "https://twitter.com/davidlatwe", "contributions": ["code", "review"]},
    {"login": "2-REC", "name": "Derek ", "avatar_url": "https://avatars.githubusercontent.com/u/42170307?v=4", "profile": "https://github.com/2-REC", "contributions": ["code", "doc"]},
    {"login": "gabormarinov", "name": "Gábor Marinov", "avatar_url": "https://avatars.githubusercontent.com/u/8620515?v=4", "profile": "https://github.com/gabormarinov", "contributions": ["code", "doc"]},
    {"login": "icyvapor", "name": "icyvapor", "avatar_url": "https://avatars.githubusercontent.com/u/1195278?v=4", "profile": "https://github.com/icyvapor", "contributions": ["code", "doc"]},
    {"login": "jlorrain", "name": "Jérôme LORRAIN", "avatar_url": "https://avatars.githubusercontent.com/u/7955673?v=4", "profile": "https://github.com/jlorrain", "contributions": ["code"]},
    {"login": "dmo-j-cube", "name": "David Morris-Oliveros", "avatar_url": "https://avatars.githubusercontent.com/u/89823400?v=4", "profile": "https://github.com/dmo-j-cube", "contributions": ["code"]},
    {"login": "BenoitConnan", "name": "BenoitConnan", "avatar_url": "https://avatars.githubusercontent.com/u/82808268?v=4", "profile": "https://github.com/BenoitConnan", "contributions": ["code"]},
    {"login": "Malthaldar", "name": "Malthaldar", "avatar_url": "https://avatars.githubusercontent.com/u/33671694?v=4", "profile": "https://github.com/Malthaldar", "contributions": ["code"]},
    {"login": "svenneve", "name": "Sven Neve", "avatar_url": "https://avatars.githubusercontent.com/u/2472863?v=4", "profile": "http://www.svenneve.com/", "contributions": ["code"]},
    {"login": "zafrs", "name": "zafrs", "avatar_url": "https://avatars.githubusercontent.com/u/26890002?v=4", "profile": "https://github.com/zafrs", "contributions": ["code"]},
    {"login": "Tilix4", "name": "Félix David", "avatar_url": "https://avatars.githubusercontent.com/u/22875539?v=4", "profile": "http://felixdavid.com/", "contributions": ["code", "doc"]}
  ],
  "contributorsPerLine": 7,
  "skipCi": true
}
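A quick aside on how this config is consumed: the all-contributors tooling reads this JSON to render the contributor table that this same commit adds to README.md. The snippet below is illustrative only (it is not part of the commit) and assumes the file sits in the repository root:

    import json
    from pathlib import Path

    # Summarize the all-contributors config added above (illustrative only).
    config = json.loads(Path(".all-contributorsrc").read_text(encoding="utf-8"))
    print(config["projectName"], "-", len(config["contributors"]), "contributors")
    for person in config["contributors"]:
        print(person["login"], "->", ", ".join(person["contributions"]))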
6 .github/workflows/prerelease.yml (vendored)

@@ -69,18 +69,16 @@ jobs:
       run: |
         git config user.email ${{ secrets.CI_EMAIL }}
         git config user.name ${{ secrets.CI_USER }}
-        cd repos/avalon-core
-        git checkout main
-        git pull
-        cd ../..
         git add .
         git commit -m "[Automated] Bump version"
         tag_name="CI/${{ steps.version.outputs.next_tag }}"
         echo $tag_name
         git tag -a $tag_name -m "nightly build"
 
     - name: Push to protected main branch
-      uses: CasperWA/push-protected@v2
+      uses: CasperWA/push-protected@v2.10.0
       with:
         token: ${{ secrets.ADMIN_TOKEN }}
         branch: main
2 .github/workflows/release.yml (vendored)

@@ -68,7 +68,7 @@ jobs:
     - name: 🔏 Push to protected main branch
       if: steps.version.outputs.release_tag != 'skip'
-      uses: CasperWA/push-protected@v2
+      uses: CasperWA/push-protected@v2.10.0
       with:
         token: ${{ secrets.ADMIN_TOKEN }}
         branch: main
7 .gitignore (vendored)

@@ -70,6 +70,8 @@ coverage.xml
 ##################
 node_modules
 package-lock.json
+package.json
+yarn.lock
 
 openpype/premiere/ppro/js/debug.log
 
@@ -100,3 +102,8 @@ website/.docusaurus
 .poetry/
 .python-version
+.editorconfig
+.pre-commit-config.yaml
+mypy.ini
+
+tools/run_eventserver.*
10 .gitmodules (vendored)

@@ -1,3 +1,7 @@
-[submodule "repos/avalon-core"]
-	path = repos/avalon-core
-	url = https://github.com/pypeclub/avalon-core.git
+[submodule "tools/modules/powershell/BurntToast"]
+	path = tools/modules/powershell/BurntToast
+	url = https://github.com/Windos/BurntToast.git
+
+[submodule "tools/modules/powershell/PSWriteColor"]
+	path = tools/modules/powershell/PSWriteColor
+	url = https://github.com/EvotecIT/PSWriteColor.git
275 CHANGELOG.md

@@ -1,141 +1,194 @@
 # Changelog
 
-## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07)
+## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30)
 
-[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3)
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1)
 
 ### 📖 Documentation
 
-- Website Docs: Manager Ftrack fix broken links [\#2979](https://github.com/pypeclub/OpenPype/pull/2979)
+- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698)
+- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660)
 
 **🆕 New features**
 
-- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027)
-- Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988)
-- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978)
+- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678)
+- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662)
 
 **🚀 Enhancements**
 
-- Ftrack: Add more options for note text of integrate ftrack note [\#3025](https://github.com/pypeclub/OpenPype/pull/3025)
-- Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016)
-- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015)
-- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005)
-- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001)
-- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995)
-- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937)
+- General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750)
+- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720)
+- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712)
+- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701)
+- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700)
+- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686)
+- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677)
+- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663)
 
 **🐛 Bug fixes**
 
-- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033)
-- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032)
-- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028)
-- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024)
-- AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023)
-- General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022)
-- Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017)
-- Ftrack: multiple reviewable componets [\#3012](https://github.com/pypeclub/OpenPype/pull/3012)
-- Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010)
-- Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002)
-- Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996)
+- Maya: Fix typo in getPanel argument `with\_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753)
+- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748)
+- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741)
+- Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737)
+- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721)
+- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716)
+- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709)
+- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708)
+- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704)
+- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703)
+- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684)
+- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682)
 
+**🔀 Refactored code**
+
+- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751)
+- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744)
+- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740)
+- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736)
+- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734)
+- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731)
+- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730)
+- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729)
+- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728)
+- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725)
+- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724)
+- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723)
+- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714)
+- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713)
+- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710)
+- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707)
+- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706)
+- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705)
+- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702)
 
 **Merged pull requests:**
 
-- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030)
-- General: adding limitations for pyright [\#2994](https://github.com/pypeclub/OpenPype/pull/2994)
+- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717)
+- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694)
+- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676)
+
+## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0)
+
+**🚀 Enhancements**
+
+- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685)
+- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680)
+- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675)
+- Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661)
+- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650)
+
+**🐛 Bug fixes**
+
+- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691)
+- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656)
+- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644)
+- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643)
+- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638)
+- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632)
+- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631)
+
+**🔀 Refactored code**
+
+- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673)
+- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653)
+- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647)
+- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639)
+- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637)
+
+**Merged pull requests:**
+
+- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666)
+- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645)
+- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636)
+- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628)
+
+## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0)
+
+**🚀 Enhancements**
+
+- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630)
+- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615)
+- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612)
+
+**🐛 Bug fixes**
+
+- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625)
+- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622)
+- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621)
+- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620)
+- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617)
+- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611)
+
+**🔀 Refactored code**
+
+- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623)
+
+**Merged pull requests:**
+
+- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619)
+- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614)
+
+## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2)
+
+## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1)
+
+## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0)
+
+## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1)
+
+## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.0-nightly.4...3.11.0)
+
+## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.6...3.10.0)
+
+## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.7...3.9.8)
+
+## [3.9.7](https://github.com/pypeclub/OpenPype/tree/3.9.7) (2022-05-11)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.6...3.9.7)
+
+## [3.9.6](https://github.com/pypeclub/OpenPype/tree/3.9.6) (2022-05-03)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.5...3.9.6)
+
+## [3.9.5](https://github.com/pypeclub/OpenPype/tree/3.9.5) (2022-04-25)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.2...3.9.5)
+
+## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.4-nightly.2...3.9.4)
+
+## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.3-nightly.2...3.9.3)
 
 ## [3.9.2](https://github.com/pypeclub/OpenPype/tree/3.9.2) (2022-04-04)
 
 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.2-nightly.4...3.9.2)
 
 ### 📖 Documentation
 
 - Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999)
 - Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951)
 - Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896)
 
 **🆕 New features**
 
 - nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992)
 - Multiverse: Initial Support [\#2908](https://github.com/pypeclub/OpenPype/pull/2908)
 
 **🚀 Enhancements**
 
 - TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000)
 - Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985)
 - General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980)
 - General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975)
 - Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967)
 - Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945)
 - NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943)
 - TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942)
 - Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925)
 - General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923)
 - Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911)
 - Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903)
 
 **🐛 Bug fixes**
 
 - Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004)
 - Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998)
 - PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991)
 - Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990)
 - AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989)
 - Maya: Don't do hardlinks on windows for look publishing [\#2986](https://github.com/pypeclub/OpenPype/pull/2986)
 - Settings UI: Fix version completer on linux [\#2981](https://github.com/pypeclub/OpenPype/pull/2981)
 - Photoshop: Fix creation of subset names in PS review and workfile [\#2969](https://github.com/pypeclub/OpenPype/pull/2969)
 - Slack: Added default for review\_upload\_limit for Slack [\#2965](https://github.com/pypeclub/OpenPype/pull/2965)
 - General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958)
 - Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956)
 - General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950)
 - LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949)
 - nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948)
 - General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947)
 - SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944)
 - Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941)
 - General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939)
 - General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936)
 - Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934)
 - Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932)
 - General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926)
 - Flame: centos related debugging [\#2922](https://github.com/pypeclub/OpenPype/pull/2922)
 
 **🔀 Refactored code**
 
 - General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935)
 - General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931)
 - General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927)
 - General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918)
 - General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912)
 
 **Merged pull requests:**
 
 - Bump paramiko from 2.9.2 to 2.10.1 [\#2973](https://github.com/pypeclub/OpenPype/pull/2973)
 - Bump minimist from 1.2.5 to 1.2.6 in /website [\#2954](https://github.com/pypeclub/OpenPype/pull/2954)
 - Bump node-forge from 1.2.1 to 1.3.0 in /website [\#2953](https://github.com/pypeclub/OpenPype/pull/2953)
 - Maya - added transparency into review creator [\#2952](https://github.com/pypeclub/OpenPype/pull/2952)
 
 ## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18)
 
 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.1-nightly.3...3.9.1)
 
 **🚀 Enhancements**
 
 - General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907)
 - nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897)
 - Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892)
 
 **🐛 Bug fixes**
 
 - General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904)
 - Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902)
 - Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899)
 - SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894)
 - Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891)
 
 **🔀 Refactored code**
 
 - General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889)
 
 ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14)
 
 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.0-nightly.9...3.9.0)
55 README.md

@@ -1,4 +1,7 @@
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
[](#contributors-)
<!-- ALL-CONTRIBUTORS-BADGE:END -->
OpenPype
====

@@ -283,3 +286,55 @@ Running tests
To run tests, execute `.\tools\run_tests(.ps1|.sh)`.

**Note that it needs existing virtual environment.**

## Contributors ✨

Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):

<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
<tr>
<td align="center"><a href="http://pype.club/"><img src="https://avatars.githubusercontent.com/u/3333008?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Milan Kolar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=mkolar" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=mkolar" title="Documentation">📖</a> <a href="#infra-mkolar" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#business-mkolar" title="Business development">💼</a> <a href="#content-mkolar" title="Content">🖋</a> <a href="#fundingFinding-mkolar" title="Funding Finding">🔍</a> <a href="#maintenance-mkolar" title="Maintenance">🚧</a> <a href="#projectManagement-mkolar" title="Project Management">📆</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Amkolar" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-mkolar" title="Mentoring">🧑🏫</a> <a href="#question-mkolar" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://www.linkedin.com/in/jakubjezek79"><img src="https://avatars.githubusercontent.com/u/40640033?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jakub Ježek</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=jakubjezek001" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=jakubjezek001" title="Documentation">📖</a> <a href="#infra-jakubjezek001" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#content-jakubjezek001" title="Content">🖋</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Ajakubjezek001" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-jakubjezek001" title="Maintenance">🚧</a> <a href="#mentoring-jakubjezek001" title="Mentoring">🧑🏫</a> <a href="#projectManagement-jakubjezek001" title="Project Management">📆</a> <a href="#question-jakubjezek001" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/antirotor"><img src="https://avatars.githubusercontent.com/u/33513211?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Ondřej Samohel</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=antirotor" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=antirotor" title="Documentation">📖</a> <a href="#infra-antirotor" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#content-antirotor" title="Content">🖋</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Aantirotor" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-antirotor" title="Maintenance">🚧</a> <a href="#mentoring-antirotor" title="Mentoring">🧑🏫</a> <a href="#projectManagement-antirotor" title="Project Management">📆</a> <a href="#question-antirotor" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/iLLiCiTiT"><img src="https://avatars.githubusercontent.com/u/43494761?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jakub Trllo</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=iLLiCiTiT" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=iLLiCiTiT" title="Documentation">📖</a> <a href="#infra-iLLiCiTiT" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3AiLLiCiTiT" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-iLLiCiTiT" title="Maintenance">🚧</a> <a href="#question-iLLiCiTiT" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/kalisp"><img src="https://avatars.githubusercontent.com/u/4457962?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Petr Kalis</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=kalisp" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=kalisp" title="Documentation">📖</a> <a href="#infra-kalisp" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Akalisp" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-kalisp" title="Maintenance">🚧</a> <a href="#question-kalisp" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/64qam"><img src="https://avatars.githubusercontent.com/u/26925793?v=4?s=100" width="100px;" alt=""/><br /><sub><b>64qam</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=64qam" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3A64qam" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=64qam" title="Documentation">📖</a> <a href="#infra-64qam" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#projectManagement-64qam" title="Project Management">📆</a> <a href="#maintenance-64qam" title="Maintenance">🚧</a> <a href="#content-64qam" title="Content">🖋</a> <a href="#userTesting-64qam" title="User Testing">📓</a></td>
<td align="center"><a href="http://www.colorbleed.nl/"><img src="https://avatars.githubusercontent.com/u/2439881?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Roy Nieterau</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=BigRoy" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=BigRoy" title="Documentation">📖</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3ABigRoy" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-BigRoy" title="Mentoring">🧑🏫</a> <a href="#question-BigRoy" title="Answering Questions">💬</a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/tokejepsen"><img src="https://avatars.githubusercontent.com/u/1860085?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Toke Jepsen</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=tokejepsen" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=tokejepsen" title="Documentation">📖</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Atokejepsen" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-tokejepsen" title="Mentoring">🧑🏫</a> <a href="#question-tokejepsen" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/jrsndl"><img src="https://avatars.githubusercontent.com/u/45896205?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jiri Sindelar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=jrsndl" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Ajrsndl" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=jrsndl" title="Documentation">📖</a> <a href="#content-jrsndl" title="Content">🖋</a> <a href="#tutorial-jrsndl" title="Tutorials">✅</a> <a href="#userTesting-jrsndl" title="User Testing">📓</a></td>
<td align="center"><a href="https://barbierisimone.com/"><img src="https://avatars.githubusercontent.com/u/1087869?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simone Barbieri</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=simonebarbieri" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=simonebarbieri" title="Documentation">📖</a></td>
<td align="center"><a href="http://karimmozilla.xyz/"><img src="https://avatars.githubusercontent.com/u/82811760?v=4?s=100" width="100px;" alt=""/><br /><sub><b>karimmozilla</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=karimmozilla" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/Allan-I"><img src="https://avatars.githubusercontent.com/u/76656700?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Allan I. A.</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Allan-I" title="Code">💻</a></td>
<td align="center"><a href="https://www.linkedin.com/in/mmuurrpphhyy/"><img src="https://avatars.githubusercontent.com/u/352795?v=4?s=100" width="100px;" alt=""/><br /><sub><b>murphy</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=m-u-r-p-h-y" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Am-u-r-p-h-y" title="Reviewed Pull Requests">👀</a> <a href="#userTesting-m-u-r-p-h-y" title="User Testing">📓</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=m-u-r-p-h-y" title="Documentation">📖</a> <a href="#projectManagement-m-u-r-p-h-y" title="Project Management">📆</a></td>
<td align="center"><a href="https://github.com/aardschok"><img src="https://avatars.githubusercontent.com/u/26920875?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Wijnand Koreman</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=aardschok" title="Code">💻</a></td>
</tr>
<tr>
<td align="center"><a href="http://jedimaster.cnblogs.com/"><img src="https://avatars.githubusercontent.com/u/1798206?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Bo Zhou</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=zhoub" title="Code">💻</a></td>
<td align="center"><a href="https://www.linkedin.com/in/clementhector/"><img src="https://avatars.githubusercontent.com/u/7068597?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Clément Hector</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=ClementHector" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3AClementHector" title="Reviewed Pull Requests">👀</a></td>
<td align="center"><a href="https://twitter.com/davidlatwe"><img src="https://avatars.githubusercontent.com/u/3357009?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Lai</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=davidlatwe" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Adavidlatwe" title="Reviewed Pull Requests">👀</a></td>
<td align="center"><a href="https://github.com/2-REC"><img src="https://avatars.githubusercontent.com/u/42170307?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Derek </b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=2-REC" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=2-REC" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/gabormarinov"><img src="https://avatars.githubusercontent.com/u/8620515?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Gábor Marinov</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=gabormarinov" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=gabormarinov" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/icyvapor"><img src="https://avatars.githubusercontent.com/u/1195278?v=4?s=100" width="100px;" alt=""/><br /><sub><b>icyvapor</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=icyvapor" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=icyvapor" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/jlorrain"><img src="https://avatars.githubusercontent.com/u/7955673?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jérôme LORRAIN</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=jlorrain" title="Code">💻</a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/dmo-j-cube"><img src="https://avatars.githubusercontent.com/u/89823400?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Morris-Oliveros</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=dmo-j-cube" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/BenoitConnan"><img src="https://avatars.githubusercontent.com/u/82808268?v=4?s=100" width="100px;" alt=""/><br /><sub><b>BenoitConnan</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=BenoitConnan" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/Malthaldar"><img src="https://avatars.githubusercontent.com/u/33671694?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Malthaldar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Malthaldar" title="Code">💻</a></td>
<td align="center"><a href="http://www.svenneve.com/"><img src="https://avatars.githubusercontent.com/u/2472863?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Sven Neve</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=svenneve" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/zafrs"><img src="https://avatars.githubusercontent.com/u/26890002?v=4?s=100" width="100px;" alt=""/><br /><sub><b>zafrs</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=zafrs" title="Code">💻</a></td>
<td align="center"><a href="http://felixdavid.com/"><img src="https://avatars.githubusercontent.com/u/22875539?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Félix David</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Tilix4" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=Tilix4" title="Documentation">📖</a></td>
</tr>
</table>

<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->

<!-- ALL-CONTRIBUTORS-LIST:END -->

This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
@@ -122,7 +122,7 @@ class OpenPypeVersion(semver.VersionInfo):
         if self.staging:
             if kwargs.get("build"):
                 if "staging" not in kwargs.get("build"):
-                    kwargs["build"] = "{}-staging".format(kwargs.get("build"))
+                    kwargs["build"] = f"{kwargs.get('build')}-staging"
             else:
                 kwargs["build"] = "staging"
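The hunk above only swaps str.format for an f-string; the behavior, appending "-staging" to the semver build metadata, is unchanged. A minimal sketch of what that metadata looks like, assuming the python-semver package that OpenPypeVersion subclasses via semver.VersionInfo:

    import semver

    # Build metadata renders after "+" in the version string (illustrative only).
    version = semver.VersionInfo.parse("3.14.1")
    print(version.replace(build="staging"))          # 3.14.1+staging
    print(version.replace(build="nightly-staging"))  # 3.14.1+nightly-staging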
@@ -136,8 +136,7 @@
         return bool(result and self.staging == other.staging)
 
     def __repr__(self):
-        return "<{}: {} - path={}>".format(
-            self.__class__.__name__, str(self), self.path)
+        return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>"
 
     def __lt__(self, other: OpenPypeVersion):
         result = super().__lt__(other)

@@ -232,10 +231,7 @@
         return openpype_version
 
     def __hash__(self):
-        if self.path:
-            return hash(self.path)
-        else:
-            return hash(str(self))
+        return hash(self.path) if self.path else hash(str(self))
 
     @staticmethod
     def is_version_in_dir(
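A small hypothetical check of the refactored __hash__ contract (the ternary keeps the exact logic of the if/else it replaces): the installation path wins when it is set, otherwise the version string is hashed. The OpenPypeVersion(version=...) constructor form is the one used elsewhere in this diff:

    # Illustrative only; not part of the commit. Assumes pathlib.Path is
    # imported, as it is in the module.
    v = OpenPypeVersion(version="3.14.1")
    assert hash(v) == hash(str(v))          # no path set, falls back to str(self)

    v.path = Path("/opt/openpype/3.14.1")   # hypothetical install path
    assert hash(v) == hash(v.path)          # with a path, the path is hashed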
@@ -384,7 +380,8 @@
 
     @classmethod
     def get_local_versions(
-        cls, production: bool = None, staging: bool = None
+        cls, production: bool = None,
+        staging: bool = None
     ) -> List:
         """Get all versions available on this machine.
 
@@ -394,6 +391,10 @@
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.
 
+        Returns:
+            list: of compatible versions available on the machine.
+
         """
         # Return all local versions if arguments are set to None
         if production is None and staging is None:
 
@@ -410,10 +411,10 @@
         if not production and not staging:
             return []
 
+        # DEPRECATED: backwards compatible way to look for versions in root
         dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
-        versions = OpenPypeVersion.get_versions_from_directory(
-            dir_to_search
-        )
+        versions = OpenPypeVersion.get_versions_from_directory(dir_to_search)
 
         filtered_versions = []
         for version in versions:
             if version.is_staging():
 
@@ -425,7 +426,8 @@
 
     @classmethod
     def get_remote_versions(
-        cls, production: bool = None, staging: bool = None
+        cls, production: bool = None,
+        staging: bool = None
     ) -> List:
         """Get all versions available in OpenPype Path.
 
@@ -435,6 +437,7 @@
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.
+
         """
         # Return all local versions if arguments are set to None
         if production is None and staging is None:
 
@@ -469,6 +472,7 @@
             return []
 
         versions = cls.get_versions_from_directory(dir_to_search)
+
         filtered_versions = []
         for version in versions:
             if version.is_staging():
 
@@ -479,7 +483,8 @@
         return list(sorted(set(filtered_versions)))
 
     @staticmethod
-    def get_versions_from_directory(openpype_dir: Path) -> List:
+    def get_versions_from_directory(
+            openpype_dir: Path) -> List:
         """Get all detected OpenPype versions in directory.
 
         Args:
@@ -492,15 +497,22 @@
             ValueError: if invalid path is specified.
 
         """
+        openpype_versions = []
         if not openpype_dir.exists() and not openpype_dir.is_dir():
             raise ValueError("specified directory is invalid")
+            return openpype_versions
 
-        _openpype_versions = []
         # iterate over directory in first level and find all that might
         # contain OpenPype.
         for item in openpype_dir.iterdir():
+            # if the item is directory with major.minor version, dive deeper
 
-            # if file, strip extension, in case of dir not.
+            if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
+                _versions = OpenPypeVersion.get_versions_from_directory(
+                    item)
+                if _versions:
+                    openpype_versions += _versions
+
+            # if file exists, strip extension, in case of dir don't.
             name = item.name if item.is_dir() else item.stem
             result = OpenPypeVersion.version_in_str(name)
 
@@ -519,9 +531,9 @@
                 continue
 
             detected_version.path = item
-            _openpype_versions.append(detected_version)
+            openpype_versions.append(detected_version)
 
-        return sorted(_openpype_versions)
+        return sorted(openpype_versions)
 
     @staticmethod
     def get_installed_version_str() -> str:
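The new pre-filter above means installed versions are expected under nested "<major>.<minor>" folders, for example 3.14/openpype-v3.14.1.zip. A standalone sketch of the same scanning idea (simplified; the real method also parses and validates the version strings):

    import re
    from pathlib import Path

    # Recursive walk mirroring the "<major>.<minor>" directory pre-filter.
    def iter_version_candidates(root: Path):
        for item in root.iterdir():
            if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
                # dive into "3.14"-style folders, as the new code does
                yield from iter_version_candidates(item)
            else:
                # file: strip the extension; directory: keep the name as-is
                yield item.name if item.is_dir() else item.stem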
@@ -550,13 +562,13 @@
             staging: bool = False,
             local: bool = None,
             remote: bool = None
-    ) -> OpenPypeVersion:
-        """Get latest available version.
+    ) -> Union[OpenPypeVersion, None]:
+        """Get the latest available version.
 
         The version does not contain information about path and source.
 
-        This is utility version to get latest version from all found. Build
-        version is not listed if staging is enabled.
+        This is utility version to get the latest version from all found.
+        Build version is not listed if staging is enabled.
 
         Arguments 'local' and 'remote' define if local and remote repository
         versions are used. All versions are used if both are not set (or set
 
@@ -568,6 +580,10 @@
             staging (bool, optional): List staging versions if True.
             local (bool, optional): List local versions if True.
             remote (bool, optional): List remote versions if True.
 
+        Returns:
+            Latest OpenPypeVersion or None
+
         """
         if local is None and remote is None:
             local = True
@@ -621,14 +637,27 @@
             return None
         return OpenPypeVersion(version=result)
 
+    def is_compatible(self, version: OpenPypeVersion):
+        """Test build compatibility.
+
+        This will simply compare major and minor versions (ignoring patch
+        and the rest).
+
+        Args:
+            version (OpenPypeVersion): Version to check compatibility with.
+
+        Returns:
+            bool: if the version is compatible
+
+        """
+        return self.major == version.major and self.minor == version.minor
+
 
 class BootstrapRepos:
     """Class for bootstrapping local OpenPype installation.
 
     Attributes:
         data_dir (Path): local OpenPype installation directory.
         live_repo_dir (Path): path to repos directory if running live,
             otherwise `None`.
         registry (OpenPypeSettingsRegistry): OpenPype registry object.
         zip_filter (list): List of files to exclude from zip
         openpype_filter (list): list of top level directories to
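A hypothetical usage of the new is_compatible() helper, using the OpenPypeVersion(version=...) form that appears in the hunk above; only major and minor have to match:

    # Illustrative only; not part of the commit.
    installed = OpenPypeVersion(version="3.14.1")
    assert installed.is_compatible(OpenPypeVersion(version="3.14.9"))      # same 3.14
    assert not installed.is_compatible(OpenPypeVersion(version="3.15.0"))  # minor differs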
@@ -654,7 +683,7 @@ class BootstrapRepos:
         self.registry = OpenPypeSettingsRegistry()
         self.zip_filter = [".pyc", "__pycache__"]
         self.openpype_filter = [
-            "openpype", "repos", "schema", "LICENSE"
+            "openpype", "schema", "LICENSE"
         ]
         self._message = message
 
@@ -667,11 +696,6 @@
             progress_callback = empty_progress
         self._progress_callback = progress_callback
 
-        if getattr(sys, "frozen", False):
-            self.live_repo_dir = Path(sys.executable).parent / "repos"
-        else:
-            self.live_repo_dir = Path(Path(__file__).parent / ".." / "repos")
-
     @staticmethod
     def get_version_path_from_list(
             version: str, version_list: list) -> Union[Path, None]:
@@ -721,9 +745,9 @@
             self, repo_dir: Path = None) -> Union[OpenPypeVersion, None]:
         """Copy zip created from OpenPype repositories to user data dir.
 
-        This detect OpenPype version either in local "live" OpenPype
+        This detects OpenPype version either in local "live" OpenPype
         repository or in user provided path. Then it will zip it in temporary
-        directory and finally it will move it to destination which is user
+        directory, and finally it will move it to destination which is user
         data directory. Existing files will be replaced.
 
         Args:
 
@@ -734,21 +758,23 @@
 
         """
         # if repo dir is not set, we detect local "live" OpenPype repository
-        # version and use it as a source. Otherwise repo_dir is user
+        # version and use it as a source. Otherwise, repo_dir is user
         # entered location.
-        if not repo_dir:
-            version = OpenPypeVersion.get_installed_version_str()
-            repo_dir = self.live_repo_dir
-        else:
+        if repo_dir:
             version = self.get_version(repo_dir)
+        else:
+            installed_version = OpenPypeVersion.get_installed_version()
+            version = str(installed_version)
+            repo_dir = installed_version.path
 
         if not version:
             self._print("OpenPype not found.", LOG_ERROR)
             return
 
         # create destination directory
-        if not self.data_dir.exists():
-            self.data_dir.mkdir(parents=True)
+        destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}"  # noqa
+        if not destination.exists():
+            destination.mkdir(parents=True)
 
         # create zip inside temporary directory.
         with tempfile.TemporaryDirectory() as temp_dir:
 
@@ -756,7 +782,7 @@
             Path(temp_dir) / f"openpype-v{version}.zip"
             self._print(f"creating zip: {temp_zip}")
 
-            self._create_openpype_zip(temp_zip, repo_dir.parent)
+            self._create_openpype_zip(temp_zip, repo_dir)
             if not os.path.exists(temp_zip):
                 self._print("make archive failed.", LOG_ERROR)
                 return None
 
@@ -776,7 +802,9 @@
             Path to moved zip on success.
 
         """
-        destination = self.data_dir / zip_file.name
+        version = OpenPypeVersion.version_in_str(zip_file.name)
+        destination_dir = self.data_dir / f"{version.major}.{version.minor}"
+        destination = destination_dir / zip_file.name
 
         if destination.exists():
             self._print(
 
@@ -788,7 +816,7 @@
             self._print(str(e), LOG_ERROR, exc_info=True)
             return None
         try:
-            shutil.move(zip_file.as_posix(), self.data_dir.as_posix())
+            shutil.move(zip_file.as_posix(), destination_dir.as_posix())
         except shutil.Error as e:
             self._print(str(e), LOG_ERROR, exc_info=True)
             return None
@ -1001,6 +1029,16 @@ class BootstrapRepos:
|
|||
|
||||
@staticmethod
|
||||
def _validate_dir(path: Path) -> tuple:
|
||||
"""Validate checksums in a given path.
|
||||
|
||||
Args:
|
||||
path (Path): path to folder to validate.
|
||||
|
||||
Returns:
|
||||
tuple(bool, str): returns status and reason as a bool
|
||||
and str in a tuple.
|
||||
|
||||
"""
|
||||
checksums_file = Path(path / "checksums")
|
||||
if not checksums_file.exists():
|
||||
# FIXME: This should be set to False sometimes in the future
|
||||
|
|
@ -1057,27 +1095,11 @@ class BootstrapRepos:
|
|||
if not archive.is_file() and not archive.exists():
|
||||
raise ValueError("Archive is not file.")
|
||||
|
||||
with ZipFile(archive, "r") as zip_file:
|
||||
name_list = zip_file.namelist()
|
||||
|
||||
roots = []
|
||||
paths = []
|
||||
for item in name_list:
|
||||
if not item.startswith("repos/"):
|
||||
continue
|
||||
|
||||
root = item.split("/")[1]
|
||||
|
||||
if root not in roots:
|
||||
roots.append(root)
|
||||
paths.append(
|
||||
f"{archive}{os.path.sep}repos{os.path.sep}{root}")
|
||||
sys.path.insert(0, paths[-1])
|
||||
|
||||
sys.path.insert(0, f"{archive}")
|
||||
archive_path = str(archive)
|
||||
sys.path.insert(0, archive_path)
|
||||
pythonpath = os.getenv("PYTHONPATH", "")
|
||||
python_paths = pythonpath.split(os.pathsep)
|
||||
python_paths += paths
|
||||
python_paths.insert(0, archive_path)
|
||||
|
||||
os.environ["PYTHONPATH"] = os.pathsep.join(python_paths)
|
||||
|
||||
|
|
@ -1094,31 +1116,28 @@ class BootstrapRepos:
|
|||
directory (Path): path to directory.
|
||||
|
||||
"""
|
||||
|
||||
sys.path.insert(0, directory.as_posix())
|
||||
directory /= "repos"
|
||||
if not directory.exists() and not directory.is_dir():
|
||||
raise ValueError("directory is invalid")
|
||||
|
||||
roots = []
|
||||
for item in directory.iterdir():
|
||||
if item.is_dir():
|
||||
root = item.as_posix()
|
||||
if root not in roots:
|
||||
roots.append(root)
|
||||
sys.path.insert(0, root)
|
||||
|
||||
pythonpath = os.getenv("PYTHONPATH", "")
|
||||
paths = pythonpath.split(os.pathsep)
|
||||
paths += roots
|
||||
|
||||
os.environ["PYTHONPATH"] = os.pathsep.join(paths)
|
||||
|
||||
@staticmethod
|
||||
def find_openpype_version(version, staging):
|
||||
def find_openpype_version(
|
||||
version: Union[str, OpenPypeVersion],
|
||||
staging: bool
|
||||
) -> Union[OpenPypeVersion, None]:
|
||||
"""Find location of specified OpenPype version.
|
||||
|
||||
Args:
|
||||
version (Union[str, OpenPypeVersion): Version to find.
|
||||
staging (bool): Filter staging versions.
|
||||
|
||||
Returns:
|
||||
requested OpenPypeVersion.
|
||||
|
||||
"""
|
||||
installed_version = OpenPypeVersion.get_installed_version()
|
||||
if isinstance(version, str):
|
||||
version = OpenPypeVersion(version=version)
|
||||
|
||||
installed_version = OpenPypeVersion.get_installed_version()
|
||||
if installed_version == version:
|
||||
return installed_version
|
||||
|
||||
|
|
@ -1145,7 +1164,18 @@ class BootstrapRepos:
|
|||
return None
|
||||
|
||||
@staticmethod
|
||||
def find_latest_openpype_version(staging):
|
||||
def find_latest_openpype_version(
|
||||
staging: bool
|
||||
) -> Union[OpenPypeVersion, None]:
|
||||
"""Find the latest available OpenPype version in all location.
|
||||
|
||||
Args:
|
||||
staging (bool): True to look for staging versions.
|
||||
|
||||
Returns:
|
||||
Latest OpenPype version on None if nothing was found.
|
||||
|
||||
"""
|
||||
installed_version = OpenPypeVersion.get_installed_version()
|
||||
local_versions = OpenPypeVersion.get_local_versions(
|
||||
staging=staging
|
||||
|
|
@ -1176,7 +1206,8 @@ class BootstrapRepos:
|
|||
self,
|
||||
openpype_path: Union[Path, str] = None,
|
||||
staging: bool = False,
|
||||
include_zips: bool = False) -> Union[List[OpenPypeVersion], None]:
|
||||
include_zips: bool = False
|
||||
) -> Union[List[OpenPypeVersion], None]:
|
||||
"""Get ordered dict of detected OpenPype version.
|
||||
|
||||
Resolution order for OpenPype is following:
|
||||
|
|
@ -1210,30 +1241,38 @@ class BootstrapRepos:
|
|||
("Finding OpenPype in non-filesystem locations is"
|
||||
" not implemented yet."))
|
||||
|
||||
dir_to_search = self.data_dir
|
||||
user_versions = self.get_openpype_versions(self.data_dir, staging)
|
||||
# if we have openpype_path specified, search only there.
|
||||
# if checks bellow for OPENPYPE_PATH and registry fails, use data_dir
|
||||
# DEPRECATED: lookup in root of this folder is deprecated in favour
|
||||
# of major.minor sub-folders.
|
||||
dirs_to_search = [self.data_dir]
|
||||
|
||||
if openpype_path:
|
||||
dir_to_search = openpype_path
|
||||
dirs_to_search = [openpype_path]
|
||||
elif os.getenv("OPENPYPE_PATH") \
|
||||
and Path(os.getenv("OPENPYPE_PATH")).exists():
|
||||
# first try OPENPYPE_PATH and if that is not available,
|
||||
# try registry.
|
||||
dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))]
|
||||
else:
|
||||
if os.getenv("OPENPYPE_PATH"):
|
||||
if Path(os.getenv("OPENPYPE_PATH")).exists():
|
||||
dir_to_search = Path(os.getenv("OPENPYPE_PATH"))
|
||||
else:
|
||||
try:
|
||||
registry_dir = Path(
|
||||
str(self.registry.get_item("openPypePath")))
|
||||
if registry_dir.exists():
|
||||
dir_to_search = registry_dir
|
||||
try:
|
||||
registry_dir = Path(
|
||||
str(self.registry.get_item("openPypePath")))
|
||||
if registry_dir.exists():
|
||||
dirs_to_search = [registry_dir]
|
||||
|
||||
except ValueError:
|
||||
# nothing found in registry, we'll use data dir
|
||||
pass
|
||||
except ValueError:
|
||||
# nothing found in registry, we'll use data dir
|
||||
pass
|
||||
|
||||
openpype_versions = self.get_openpype_versions(dir_to_search, staging)
|
||||
openpype_versions += user_versions
|
||||
openpype_versions = []
|
||||
for dir_to_search in dirs_to_search:
|
||||
try:
|
||||
openpype_versions += self.get_openpype_versions(
|
||||
dir_to_search, staging)
|
||||
except ValueError:
|
||||
# location is invalid, skip it
|
||||
pass
|
||||
|
||||
# remove zip file version if needed.
|
||||
if not include_zips:
|
||||
openpype_versions = [
|
||||
v for v in openpype_versions if v.path.suffix != ".zip"
|
||||
|
|
@ -1346,9 +1385,8 @@ class BootstrapRepos:
|
|||
raise ValueError(
|
||||
f"version {version} is not associated with any file")
|
||||
|
||||
destination = self.data_dir / version.path.stem
|
||||
if destination.exists():
|
||||
assert destination.is_dir()
|
||||
destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem # noqa
|
||||
if destination.exists() and destination.is_dir():
|
||||
try:
|
||||
shutil.rmtree(destination)
|
||||
except OSError as e:
|
||||
|
|
@ -1417,7 +1455,7 @@ class BootstrapRepos:
|
|||
else:
|
||||
dir_name = openpype_version.path.stem
|
||||
|
||||
destination = self.data_dir / dir_name
|
||||
destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name # noqa
|
||||
|
||||
# test if destination directory already exist, if so lets delete it.
|
||||
if destination.exists() and force:
|
||||
|
|
@ -1437,6 +1475,7 @@ class BootstrapRepos:
|
|||
# create destination parent directories even if they don't exist.
|
||||
destination.mkdir(parents=True)
|
||||
|
||||
remove_source_file = False
|
||||
# version is directory
|
||||
if openpype_version.path.is_dir():
|
||||
# create zip inside temporary directory.
|
||||
|
|
@ -1470,6 +1509,8 @@ class BootstrapRepos:
|
|||
self._progress_callback(35)
|
||||
openpype_version.path = self._copy_zip(
|
||||
openpype_version.path, destination)
|
||||
# Mark zip to be deleted when done
|
||||
remove_source_file = True
|
||||
|
||||
# extract zip there
|
||||
self._print("extracting zip to destination ...")
|
||||
|
|
@ -1478,6 +1519,10 @@ class BootstrapRepos:
|
|||
zip_ref.extractall(destination)
|
||||
self._progress_callback(100)
|
||||
|
||||
# Remove zip file copied to local app data
|
||||
if remove_source_file:
|
||||
os.remove(openpype_version.path)
|
||||
|
||||
return destination
|
||||
|
||||
def _copy_zip(self, source: Path, destination: Path) -> Path:
|
||||
|
|
@ -1588,9 +1633,10 @@ class BootstrapRepos:
|
|||
return False
|
||||
return True
|
||||
|
||||
def get_openpype_versions(self,
|
||||
openpype_dir: Path,
|
||||
staging: bool = False) -> list:
|
||||
def get_openpype_versions(
|
||||
self,
|
||||
openpype_dir: Path,
|
||||
staging: bool = False) -> list:
|
||||
"""Get all detected OpenPype versions in directory.
|
||||
|
||||
Args:
|
||||
|
|
@ -1605,14 +1651,20 @@ class BootstrapRepos:
|
|||
|
||||
"""
|
||||
if not openpype_dir.exists() and not openpype_dir.is_dir():
|
||||
raise ValueError("specified directory is invalid")
|
||||
raise ValueError(f"specified directory {openpype_dir} is invalid")
|
||||
|
||||
_openpype_versions = []
|
||||
openpype_versions = []
|
||||
# iterate over directory in first level and find all that might
|
||||
# contain OpenPype.
|
||||
for item in openpype_dir.iterdir():
|
||||
# if the item is directory with major.minor version, dive deeper
|
||||
if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
|
||||
_versions = self.get_openpype_versions(
|
||||
item, staging=staging)
|
||||
if _versions:
|
||||
openpype_versions += _versions
|
||||
|
||||
# if file, strip extension, in case of dir not.
|
||||
# if it is file, strip extension, in case of dir don't.
|
||||
name = item.name if item.is_dir() else item.stem
|
||||
result = OpenPypeVersion.version_in_str(name)
|
||||
|
||||
|
|
@ -1632,12 +1684,12 @@ class BootstrapRepos:
|
|||
|
||||
detected_version.path = item
|
||||
if staging and detected_version.is_staging():
|
||||
_openpype_versions.append(detected_version)
|
||||
openpype_versions.append(detected_version)
|
||||
|
||||
if not staging and not detected_version.is_staging():
|
||||
_openpype_versions.append(detected_version)
|
||||
openpype_versions.append(detected_version)
|
||||
|
||||
return sorted(_openpype_versions)
|
||||
return sorted(openpype_versions)
|
||||
|
||||
|
||||
class OpenPypeVersionExists(Exception):
|
||||
|
|
|
|||
|
|
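The `is_compatible` check added above intentionally compares only the major and minor components. A minimal sketch of the resulting behaviour (illustrative only, not part of the commit; the import path mirrors the one `openpype/cli.py` uses further below):

```python
from igniter.bootstrap_repos import OpenPypeVersion

installed = OpenPypeVersion(version="3.9.4")

# Patch and prerelease parts are ignored; only major.minor must match.
assert installed.is_compatible(OpenPypeVersion(version="3.9.1"))
assert not installed.is_compatible(OpenPypeVersion(version="3.8.2"))
```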
@@ -62,7 +62,7 @@ class InstallThread(QThread):
            progress_callback=self.set_progress, message=self.message)
        local_version = OpenPypeVersion.get_installed_version_str()

-        # if user did entered nothing, we install OpenPype from local version.
+        # if user did enter nothing, we install OpenPype from local version.
        # zip content of `repos`, copy it to user data dir and append
        # version to it.
        if not self._path:

@@ -93,6 +93,23 @@
                detected = bs.find_openpype(include_zips=True)

                if detected:
+                    if not OpenPypeVersion.get_installed_version().is_compatible(
+                            detected[-1]):
+                        self.message.emit((
+                            f"Latest detected version {detected[-1]} "
+                            "is not compatible with the currently running "
+                            f"{local_version}"
+                        ), True)
+                        self.message.emit((
+                            "Filtering detected versions to compatible ones..."
+                        ), False)
+
+                    detected = [
+                        version for version in detected
+                        if version.is_compatible(
+                            OpenPypeVersion.get_installed_version())
+                    ]
+
                    if OpenPypeVersion(
                            version=local_version, path=Path()) < detected[-1]:
                        self.message.emit((
@@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception):
    pass


+class OpenPypeVersionIncompatible(Exception):
+    """OpenPype version is not compatible with the installed one (build)."""
+    pass
+
+
def should_add_certificate_path_to_mongo_url(mongo_url):
    """Check if should add ca certificate to mongo url.

@@ -18,7 +18,8 @@ AppPublisher=Orbi Tools s.r.o
AppPublisherURL=http://pype.club
AppSupportURL=http://pype.club
AppUpdatesURL=http://pype.club
-DefaultDirName={autopf}\{#MyAppName}
+DefaultDirName={autopf}\{#MyAppName}\{#AppVer}
+UsePreviousAppDir=no
DisableProgramGroupPage=yes
OutputBaseFilename={#MyAppName}-{#AppVer}-install
AllowCancelDuringInstall=yes

@@ -27,7 +28,7 @@ AllowCancelDuringInstall=yes
PrivilegesRequiredOverridesAllowed=dialog
SetupIconFile=igniter\openpype.ico
OutputDir=build\
-Compression=lzma
+Compression=lzma2
SolidCompression=yes
WizardStyle=modern

@@ -37,6 +38,11 @@ Name: "english"; MessagesFile: "compiler:Default.isl"
[Tasks]
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked

+[InstallDelete]
+; clean everything in previous installation folder
+Type: filesandordirs; Name: "{app}\*"
+
+
[Files]
Source: "build\{#build}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
; NOTE: Don't use "Flags: ignoreversion" on any shared system files
@@ -1,102 +1,5 @@
# -*- coding: utf-8 -*-
"""Pype module."""
import os
-import platform
-import logging
-
-from .settings import get_project_settings
-from .lib import (
-    Anatomy,
-    filter_pyblish_plugins,
-    change_timer_to_current_context,
-    register_event_callback,
-)
-
-log = logging.getLogger(__name__)
-
-
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
-
-# Global plugin paths
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
-LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
-
-
-def install():
-    """Install OpenPype to Avalon."""
-    import avalon.api
-    import pyblish.api
-    from pyblish.lib import MessageHandler
-    from openpype.modules import load_modules
-    from openpype.pipeline import (
-        register_loader_plugin_path,
-        register_inventory_action,
-        register_creator_plugin_path,
-    )
-
-    # Make sure modules are loaded
-    load_modules()
-
-    def modified_emit(obj, record):
-        """Method replacing `emit` in Pyblish's MessageHandler."""
-        record.msg = record.getMessage()
-        obj.records.append(record)
-
-    MessageHandler.emit = modified_emit
-
-    log.info("Registering global plug-ins..")
-    pyblish.api.register_plugin_path(PUBLISH_PATH)
-    pyblish.api.register_discovery_filter(filter_pyblish_plugins)
-    register_loader_plugin_path(LOAD_PATH)
-
-    project_name = os.environ.get("AVALON_PROJECT")
-
-    # Register studio specific plugins
-    if project_name:
-        anatomy = Anatomy(project_name)
-        anatomy.set_root_environments()
-        avalon.api.register_root(anatomy.roots)
-
-        project_settings = get_project_settings(project_name)
-        platform_name = platform.system().lower()
-        project_plugins = (
-            project_settings
-            .get("global", {})
-            .get("project_plugins", {})
-            .get(platform_name)
-        ) or []
-        for path in project_plugins:
-            try:
-                path = str(path.format(**os.environ))
-            except KeyError:
-                pass
-
-            if not path or not os.path.exists(path):
-                continue
-
-            pyblish.api.register_plugin_path(path)
-            register_loader_plugin_path(path)
-            register_creator_plugin_path(path)
-            register_inventory_action(path)
-
-    # apply monkey patched discover to original one
-    log.info("Patching discovery")
-
-    register_event_callback("taskChanged", _on_task_change)
-
-
-def _on_task_change():
-    change_timer_to_current_context()
-
-
-def uninstall():
-    """Uninstall Pype from Avalon."""
-    import pyblish.api
-    from openpype.pipeline import deregister_loader_plugin_path
-
-    log.info("Deregistering global plug-ins..")
-    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
-    pyblish.api.deregister_discovery_filter(filter_pyblish_plugins)
-    deregister_loader_plugin_path(LOAD_PATH)
-    log.info("Global plug-ins unregistred")
@@ -3,27 +3,25 @@ from .settings import (
    get_project_settings,
    get_current_project_settings,
    get_anatomy_settings,
    get_environments,

    SystemSettings,
    ProjectSettings
)
from .lib import (
    PypeLogger,
+    Logger,
    Anatomy,
    config,
    execute,
    run_subprocess,
    version_up,
    get_asset,
-    get_hierarchy,
    get_workdir_data,
    get_version_from_path,
    get_last_version_from_path,
    get_app_environments_for_context,
    source_hash,
-    get_latest_version,
    get_global_environments,
    get_local_site_id,
    change_openpype_mongo_url,
    create_project_folders,

@@ -61,18 +59,16 @@ from .action import (
    RepairContextAction
)

-# for backward compatibility with Pype 2
-Logger = PypeLogger
-
__all__ = [
    "get_system_settings",
    "get_project_settings",
    "get_current_project_settings",
    "get_anatomy_settings",
    "get_environments",
    "get_project_basic_paths",

    "SystemSettings",
    "ProjectSettings",

    "PypeLogger",
    "Logger",

@@ -102,8 +98,8 @@ __all__ = [

    # get contextual data
    "version_up",
-    "get_hierarchy",
    "get_asset",
    "get_workdir_data",
    "get_version_from_path",
    "get_last_version_from_path",
+    "get_app_environments_for_context",

@@ -111,7 +107,6 @@ __all__ = [

    "run_subprocess",
-    "get_latest_version",
    "get_global_environments",

    "get_local_site_id",
    "change_openpype_mongo_url",
openpype/cli.py | 111

@@ -2,7 +2,7 @@
"""Package for handling pype command line arguments."""
import os
+import sys

import code
import click

-# import sys

@@ -20,6 +20,10 @@ from .pype_commands import PypeCommands
              "to list staging versions."))
+@click.option("--validate-version", expose_value=False,
+              help="validate given version integrity")
@click.option("--debug", is_flag=True, expose_value=False,
              help=("Enable debug"))
+@click.option("--verbose", expose_value=False,
+              help=("Change OpenPype log level (debug - critical or 0-50)"))
def main(ctx):
    """Pype is main command serving as entry point to pipeline system.

@@ -37,30 +41,13 @@ def settings(dev):


-@main.command()
-def standalonepublisher():
-    """Show Pype Standalone publisher UI."""
-    PypeCommands().launch_standalone_publisher()
-
-
-@main.command()
-def traypublisher():
-    """Show new OpenPype Standalone publisher UI."""
-    PypeCommands().launch_traypublisher()
-
-
@main.command()
-@click.option("-d", "--debug",
-              is_flag=True, help=("Run pype tray in debug mode"))
-def tray(debug=False):
+def tray():
    """Launch pype tray.

    Default action of pype command is to launch tray widget to control basic
    aspects of pype. See documentation for more information.
-
-    Running pype with `--debug` will result in lot of information useful for
-    debugging to be shown in console.
    """
-    PypeCommands().launch_tray(debug)
+    PypeCommands().launch_tray()


@PypeCommands.add_modules

@@ -75,7 +62,6 @@ def module(ctx):


@main.command()
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("--ftrack-url", envvar="FTRACK_SERVER",
              help="Ftrack server url")
@click.option("--ftrack-user", envvar="FTRACK_API_USER",

@@ -88,8 +74,7 @@ def module(ctx):
              help="Clockify API key.")
@click.option("--clockify-workspace", envvar="CLOCKIFY_WORKSPACE",
              help="Clockify workspace")
-def eventserver(debug,
-                ftrack_url,
+def eventserver(ftrack_url,
                ftrack_user,
                ftrack_api_key,
                legacy,

@@ -100,8 +85,6 @@ def eventserver(debug,
    This should be ideally used by system service (such us systemd or upstart
    on linux and window service).
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"

    PypeCommands().launch_eventservercli(
        ftrack_url,

@@ -114,12 +97,11 @@ def eventserver(debug,


@main.command()
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host", default=None)
@click.option("-p", "--port", help="Port", default=None)
@click.option("-e", "--executable", help="Executable")
@click.option("-u", "--upload_dir", help="Upload dir")
-def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None):
+def webpublisherwebserver(executable, upload_dir, host=None, port=None):
    """Starts webserver for communication with Webpublish FR via command line

    OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND

@@ -127,8 +109,6 @@ def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None):

    Expect "pype.club" user created on Ftrack.
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"

    PypeCommands().launch_webpublisher_webservercli(
        upload_dir=upload_dir,

@@ -164,38 +144,34 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup):

@main.command()
@click.argument("paths", nargs=-1)
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-t", "--targets", help="Targets module", default=None,
              multiple=True)
@click.option("-g", "--gui", is_flag=True,
              help="Show Publish UI", default=False)
-def publish(debug, paths, targets, gui):
+def publish(paths, targets, gui):
    """Start CLI publishing.

    Publish collects json from paths provided as an argument.
    More than one path is allowed.
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"
-
    PypeCommands.publish(list(paths), targets, gui)


@main.command()
@click.argument("path")
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
              multiple=True)
-def remotepublishfromapp(debug, project, path, host, user=None, targets=None):
+def remotepublishfromapp(project, path, host, user=None, targets=None):
    """Start CLI publishing.

    Publish collects json from paths provided as an argument.
    More than one path is allowed.
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"
-
    PypeCommands.remotepublishfromapp(
        project, path, host, user, targets=targets
    )

@@ -203,24 +179,21 @@ def remotepublish(debug, project, path, user=None, targets=None):

@main.command()
@click.argument("path")
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
              multiple=True)
-def remotepublish(debug, project, path, user=None, targets=None):
+def remotepublish(project, path, user=None, targets=None):
    """Start CLI publishing.

    Publish collects json from paths provided as an argument.
    More than one path is allowed.
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"

    PypeCommands.remotepublish(project, path, user, targets=targets)


@main.command()
-@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-p", "--project", required=True,
              help="name of project asset is under")
@click.option("-a", "--asset", required=True,

@@ -228,7 +201,7 @@ def remotepublish(debug, project, path, user=None, targets=None):
@click.option("--path", required=True,
              help="path where textures are found",
              type=click.Path(exists=True))
-def texturecopy(debug, project, asset, path):
+def texturecopy(project, asset, path):
    """Copy specified textures to provided asset path.

    It validates if project and asset exists. Then it will use speedcopy to

@@ -239,8 +212,7 @@ def texturecopy(debug, project, asset, path):
    Result will be copied without directory structure so it will be flat then.
    Nothing is written to database.
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"

    PypeCommands().texture_copy(project, asset, path)


@@ -389,11 +361,9 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,


@main.command()
-@click.option("-d", "--debug",
-              is_flag=True, help=("Run process in debug mode"))
@click.option("-a", "--active_site", required=True,
              help="Name of active stie")
-def syncserver(debug, active_site):
+def syncserver(active_site):
    """Run sync site server in background.

    Some Site Sync use cases need to expose site to another one.

@@ -408,8 +378,7 @@ def syncserver(debug, active_site):
    Settings (configured by starting OP Tray with env
    var OPENPYPE_LOCAL_ID set to 'active_site'.
    """
-    if debug:
-        os.environ["OPENPYPE_DEBUG"] = "1"

    PypeCommands().syncserver(active_site)


@@ -443,3 +412,45 @@ def pack_project(project, dirpath):
def unpack_project(zipfile, root):
    """Create a package of project with all files and database dump."""
    PypeCommands().unpack_project(zipfile, root)
+
+
+@main.command()
+def interactive():
+    """Interative (Python like) console.
+
+    Helpfull command not only for development to directly work with python
+    interpreter.
+
+    Warning:
+        Executable 'openpype_gui' on windows won't work.
+    """
+
+    from openpype.version import __version__
+
+    banner = "OpenPype {}\nPython {} on {}".format(
+        __version__, sys.version, sys.platform
+    )
+    code.interact(banner)
+
+
+@main.command()
+@click.option("--build", help="Print only build version",
+              is_flag=True, default=False)
+def version(build):
+    """Print OpenPype version."""
+
+    from openpype.version import __version__
+    from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion
+    from pathlib import Path
+    import os
+
+    if getattr(sys, 'frozen', False):
+        local_version = BootstrapRepos.get_version(
+            Path(os.getenv("OPENPYPE_ROOT")))
+    else:
+        local_version = OpenPypeVersion.get_installed_version_str()
+
+    if build:
+        print(local_version)
+        return
+    print(f"{__version__} (booted: {local_version})")
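The per-command `-d/--debug` flags removed above are superseded by the global `--debug` option on `main`, declared with `expose_value=False`. That is a standard click feature: the option is parsed but never passed to the command callback. A self-contained sketch of the pattern (an illustration, not code from this commit):

```python
import click


@click.command()
@click.option("--debug", is_flag=True, expose_value=False,
              help="Enable debug")
def main():
    # The 'debug' flag is not received here; it is consumed before the
    # command body runs (e.g. read from sys.argv by the bootstrap).
    click.echo("running")


if __name__ == "__main__":
    main()
```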
openpype/client/__init__.py | 91 (new file)

@@ -0,0 +1,91 @@
from .mongo import (
    OpenPypeMongoConnection,
)

from .entities import (
    get_projects,
    get_project,
    get_whole_project,

    get_asset_by_id,
    get_asset_by_name,
    get_assets,
    get_archived_assets,
    get_asset_ids_with_subsets,

    get_subset_by_id,
    get_subset_by_name,
    get_subsets,
    get_subset_families,

    get_version_by_id,
    get_version_by_name,
    get_versions,
    get_hero_version_by_id,
    get_hero_version_by_subset_id,
    get_hero_versions,
    get_last_versions,
    get_last_version_by_subset_id,
    get_last_version_by_subset_name,
    get_output_link_versions,

    version_is_latest,

    get_representation_by_id,
    get_representation_by_name,
    get_representations,
    get_representation_parents,
    get_representations_parents,
    get_archived_representations,

    get_thumbnail,
    get_thumbnails,
    get_thumbnail_id_from_source,

    get_workfile_info,
)

__all__ = (
    "OpenPypeMongoConnection",

    "get_projects",
    "get_project",
    "get_whole_project",

    "get_asset_by_id",
    "get_asset_by_name",
    "get_assets",
    "get_archived_assets",
    "get_asset_ids_with_subsets",

    "get_subset_by_id",
    "get_subset_by_name",
    "get_subsets",
    "get_subset_families",

    "get_version_by_id",
    "get_version_by_name",
    "get_versions",
    "get_hero_version_by_id",
    "get_hero_version_by_subset_id",
    "get_hero_versions",
    "get_last_versions",
    "get_last_version_by_subset_id",
    "get_last_version_by_subset_name",
    "get_output_link_versions",

    "version_is_latest",

    "get_representation_by_id",
    "get_representation_by_name",
    "get_representations",
    "get_representation_parents",
    "get_representations_parents",
    "get_archived_representations",

    "get_thumbnail",
    "get_thumbnails",
    "get_thumbnail_id_from_source",

    "get_workfile_info",
)
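For illustration, the package above is meant to be the single public entry point for queries. A hypothetical usage sketch (the project name is a placeholder, and it assumes the query functions return Mongo documents with a `name` field, as elsewhere in the codebase):

```python
from openpype.client import get_project, get_assets

project_doc = get_project("demo_project")
for asset_doc in get_assets("demo_project"):
    print(asset_doc["name"])
```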
openpype/client/entities.py | 1479 (new file)

File diff suppressed because it is too large.
openpype/client/mongo.py | 235 (new file)

@@ -0,0 +1,235 @@
import os
import sys
import time
import logging
import pymongo
import certifi

if sys.version_info[0] == 2:
    from urlparse import urlparse, parse_qs
else:
    from urllib.parse import urlparse, parse_qs


class MongoEnvNotSet(Exception):
    pass


def _decompose_url(url):
    """Decompose mongo url to basic components.

    Used for creation of MongoHandler which expect mongo url components as
    separated kwargs. Components are at the end not used as we're setting
    connection directly this is just a dumb components for MongoHandler
    validation pass.
    """

    # Use first url from passed url
    # - this is because it is possible to pass multiple urls for multiple
    #   replica sets which would crash on urlparse otherwise
    # - please don't use comma in username of password
    url = url.split(",")[0]
    components = {
        "scheme": None,
        "host": None,
        "port": None,
        "username": None,
        "password": None,
        "auth_db": None
    }

    result = urlparse(url)
    if result.scheme is None:
        _url = "mongodb://{}".format(url)
        result = urlparse(_url)

    components["scheme"] = result.scheme
    components["host"] = result.hostname
    try:
        components["port"] = result.port
    except ValueError:
        raise RuntimeError("invalid port specified")
    components["username"] = result.username
    components["password"] = result.password

    try:
        components["auth_db"] = parse_qs(result.query)['authSource'][0]
    except KeyError:
        # no auth db provided, mongo will use the one we are connecting to
        pass

    return components


def get_default_components():
    mongo_url = os.environ.get("OPENPYPE_MONGO")
    if mongo_url is None:
        raise MongoEnvNotSet(
            "URL for Mongo logging connection is not set."
        )
    return _decompose_url(mongo_url)


def should_add_certificate_path_to_mongo_url(mongo_url):
    """Check if should add ca certificate to mongo url.

    Since 30.9.2021 cloud mongo requires newer certificates that are not
    available on most of workstation. This adds path to certifi certificate
    which is valid for it. To add the certificate path url must have scheme
    'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query.
    """

    parsed = urlparse(mongo_url)
    query = parse_qs(parsed.query)
    lowered_query_keys = set(key.lower() for key in query.keys())
    add_certificate = False
    # Check if url 'ssl' or 'tls' are set to 'true'
    for key in ("ssl", "tls"):
        if key in query and "true" in query["ssl"]:
            add_certificate = True
            break

    # Check if url contains 'mongodb+srv'
    if not add_certificate and parsed.scheme == "mongodb+srv":
        add_certificate = True

    # Check if url does already contain certificate path
    if add_certificate and "tlscafile" in lowered_query_keys:
        add_certificate = False

    return add_certificate


def validate_mongo_connection(mongo_uri):
    """Check if provided mongodb URL is valid.

    Args:
        mongo_uri (str): URL to validate.

    Raises:
        ValueError: When port in mongo uri is not valid.
        pymongo.errors.InvalidURI: If passed mongo is invalid.
        pymongo.errors.ServerSelectionTimeoutError: If connection timeout
            passed so probably couldn't connect to mongo server.

    """

    client = OpenPypeMongoConnection.create_connection(
        mongo_uri, retry_attempts=1
    )
    client.close()


class OpenPypeMongoConnection:
    """Singleton MongoDB connection.

    Keeps MongoDB connections by url.
    """

    mongo_clients = {}
    log = logging.getLogger("OpenPypeMongoConnection")

    @staticmethod
    def get_default_mongo_url():
        return os.environ["OPENPYPE_MONGO"]

    @classmethod
    def get_mongo_client(cls, mongo_url=None):
        if mongo_url is None:
            mongo_url = cls.get_default_mongo_url()

        connection = cls.mongo_clients.get(mongo_url)
        if connection:
            # Naive validation of existing connection
            try:
                connection.server_info()
                with connection.start_session():
                    pass
            except Exception:
                connection = None

        if not connection:
            cls.log.debug("Creating mongo connection to {}".format(mongo_url))
            connection = cls.create_connection(mongo_url)
            cls.mongo_clients[mongo_url] = connection

        return connection

    @classmethod
    def create_connection(cls, mongo_url, timeout=None, retry_attempts=None):
        parsed = urlparse(mongo_url)
        # Force validation of scheme
        if parsed.scheme not in ["mongodb", "mongodb+srv"]:
            raise pymongo.errors.InvalidURI((
                "Invalid URI scheme:"
                " URI must begin with 'mongodb://' or 'mongodb+srv://'"
            ))

        if timeout is None:
            timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000)

        kwargs = {
            "serverSelectionTimeoutMS": timeout
        }
        if should_add_certificate_path_to_mongo_url(mongo_url):
            kwargs["ssl_ca_certs"] = certifi.where()

        mongo_client = pymongo.MongoClient(mongo_url, **kwargs)

        if retry_attempts is None:
            retry_attempts = 3

        elif not retry_attempts:
            retry_attempts = 1

        last_exc = None
        valid = False
        t1 = time.time()
        for attempt in range(1, retry_attempts + 1):
            try:
                mongo_client.server_info()
                with mongo_client.start_session():
                    pass
                valid = True
                break

            except Exception as exc:
                last_exc = exc
                if attempt < retry_attempts:
                    cls.log.warning(
                        "Attempt {} failed. Retrying... ".format(attempt)
                    )
                    time.sleep(1)

        if not valid:
            raise last_exc

        cls.log.info("Connected to {}, delay {:.3f}s".format(
            mongo_url, time.time() - t1
        ))
        return mongo_client


def get_project_database():
    db_name = os.environ.get("AVALON_DB") or "avalon"
    return OpenPypeMongoConnection.get_mongo_client()[db_name]


def get_project_connection(project_name):
    """Direct access to mongo collection.

    We're trying to avoid using direct access to mongo. This should be used
    only for Create, Update and Remove operations until there are implemented
    api calls for that.

    Args:
        project_name(str): Project name for which collection should be
            returned.

    Returns:
        pymongo.Collection: Collection realated to passed project.
    """

    if not project_name:
        raise ValueError("Invalid project name {}".format(str(project_name)))
    return get_project_database()[project_name]
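A short usage sketch of the connection singleton above (illustrative only; `OPENPYPE_MONGO` must point to a reachable server and the project name is a placeholder):

```python
import os

from openpype.client.mongo import (
    OpenPypeMongoConnection,
    get_project_connection,
)

os.environ.setdefault("OPENPYPE_MONGO", "mongodb://localhost:27017")

client = OpenPypeMongoConnection.get_mongo_client()  # cached per URL
collection = get_project_connection("demo_project")
print(collection.count_documents({"type": "asset"}))
```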
openpype/client/notes.md | 39 (new file)

@@ -0,0 +1,39 @@
# Client functionality
## Reason
Preparation for OpenPype v4 server. The goal is to remove direct mongo calls in code to prepare a little bit for a different source of data, and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so pymongo-specific code is not used directly.

The current goal is not to make a universal database model which can be easily replaced with any different source of data, but to get as close as possible. The current implementation of OpenPype is too tightly connected to pymongo and its abilities, so we're trying to get closer with long term changes that can be used even in the current state.

## Queries
Query functions don't use the full potential of mongo queries, like very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they probably won't be available in the future. If it's really necessary, a new function can be added, but only if it's reasonable for overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with available filters and a possible reduce of returned keys for each entity.

## Changes
Changes are a little bit complicated. Mongo has many options how an update can happen, which had to be reduced; also it would be complicated at this stage to validate values which are created or updated, thus automation is at this point almost none. Changes can be made using operations available in `~/client/operations.py`. Each operation requires project name and entity type, but may require operation specific data.

### Create
Create operations expect already prepared document data; for that, functions creating skeletal structures of documents are prepared (they do not fill all required data), so except `_id` all data should be right. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.

### Update
Update operation requires an entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary, the key must have all subkeys joined with dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify update dictionaries, functions were prepared which do that for you; their names follow the template `prepare_<entity type>_update_data` - they work on comparison of a previous document and a new document. If a function for a requested entity type is missing, it is because we didn't need it yet and it requires implementation.

### Delete
Delete operation needs an entity id. The entity will be deleted from mongo.


## What (probably) won't be replaced
Some parts of code are still using direct mongo calls. In most cases it is for very specific calls that are module specific or whose usage will completely change in future.
- Mongo calls that are not project specific (out of the `avalon` collection) will be removed or will have to use a different mechanism for how the data are stored. At this moment this is related to OpenPype settings and logs, ftrack server events, and some other data.
- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, thus their abstraction with a wrapper is irrelevant and would complicate production in v3.
- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers are creating, updating or removing assets in v3, but in v4 will create folders with a different structure. Wrapping creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin which creates project structure.
- Code parts that are marked as deprecated in v3 or will be deprecated in v4:
- integrate asset legacy publish plugin - already is legacy, kept for safety
- integrate thumbnail - thumbnails will be stored in a different way in v4
- input links - links will be stored in a different way and will have a different mechanism of linking. In v3 links are limited to the same entity type: "asset <-> asset" or "representation <-> representation".

## Known missing replacements
- change subset group in loader tool
- integrate subset group
- query input links in openpype lib
- create project in openpype lib
- save/create workfile doc in openpype lib
- integrate hero version
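The dot-joined key convention described under "Update" can be illustrated with a small flattening helper. This is only a sketch of the convention; the commit's own `prepare_<entity type>_update_data` helpers instead derive the changes by comparing an old and a new document:

```python
def flatten_update_data(update_data, _prefix=""):
    """Convert nested dicts like {"data": {"fps": 25}} to {"data.fps": 25}."""
    flat = {}
    for key, value in update_data.items():
        full_key = "{}{}".format(_prefix, key)
        if isinstance(value, dict):
            # Recurse into nested dictionaries, extending the dotted prefix.
            flat.update(flatten_update_data(value, full_key + "."))
        else:
            flat[full_key] = value
    return flat


print(flatten_update_data({"data": {"fps": 25, "resolutionWidth": 1920}}))
# {'data.fps': 25, 'data.resolutionWidth': 1920}
```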
664
openpype/client/operations.py
Normal file
664
openpype/client/operations.py
Normal file
|
|
@ -0,0 +1,664 @@
|
|||
import re
|
||||
import uuid
|
||||
import copy
|
||||
import collections
|
||||
from abc import ABCMeta, abstractmethod, abstractproperty
|
||||
|
||||
import six
|
||||
from bson.objectid import ObjectId
|
||||
from pymongo import DeleteOne, InsertOne, UpdateOne
|
||||
|
||||
from .mongo import get_project_connection
|
||||
|
||||
REMOVED_VALUE = object()
|
||||
|
||||
PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_"
|
||||
PROJECT_NAME_REGEX = re.compile(
|
||||
"^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS)
|
||||
)
|
||||
|
||||
CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
|
||||
CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
|
||||
CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
|
||||
CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
|
||||
CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
|
||||
CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
|
||||
CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
|
||||
CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"
|
||||
|
||||
|
||||
def _create_or_convert_to_mongo_id(mongo_id):
|
||||
if mongo_id is None:
|
||||
return ObjectId()
|
||||
return ObjectId(mongo_id)
|
||||
|
||||
|
||||
def new_project_document(
|
||||
project_name, project_code, config, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of project document.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project. Used as identifier of a project.
|
||||
project_code (str): Shorter version of projet without spaces and
|
||||
special characters (in most of cases). Should be also considered
|
||||
as unique name across projects.
|
||||
config (Dic[str, Any]): Project config consist of roots, templates,
|
||||
applications and other project Anatomy related data.
|
||||
data (Dict[str, Any]): Project data with information about it's
|
||||
attributes (e.g. 'fps' etc.) or integration specific keys.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of project document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
data["code"] = project_code
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"name": project_name,
|
||||
"type": CURRENT_PROJECT_SCHEMA,
|
||||
"entity_data": data,
|
||||
"config": config
|
||||
}
|
||||
|
||||
|
||||
def new_asset_document(
|
||||
name, project_id, parent_id, parents, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of asset document.
|
||||
|
||||
Args:
|
||||
name (str): Is considered as unique identifier of asset in project.
|
||||
project_id (Union[str, ObjectId]): Id of project doument.
|
||||
parent_id (Union[str, ObjectId]): Id of parent asset.
|
||||
parents (List[str]): List of parent assets names.
|
||||
data (Dict[str, Any]): Asset document data. Empty dictionary is used
|
||||
if not passed. Value of 'parent_id' is used to fill 'visualParent'.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of asset document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
if parent_id is not None:
|
||||
parent_id = ObjectId(parent_id)
|
||||
data["visualParent"] = parent_id
|
||||
data["parents"] = parents
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"type": "asset",
|
||||
"name": name,
|
||||
"parent": ObjectId(project_id),
|
||||
"data": data,
|
||||
"schema": CURRENT_ASSET_DOC_SCHEMA
|
||||
}
|
||||
|
||||
|
||||
def new_subset_document(name, family, asset_id, data=None, entity_id=None):
|
||||
"""Create skeleton data of subset document.
|
||||
|
||||
Args:
|
||||
name (str): Is considered as unique identifier of subset under asset.
|
||||
family (str): Subset's family.
|
||||
asset_id (Union[str, ObjectId]): Id of parent asset.
|
||||
data (Dict[str, Any]): Subset document data. Empty dictionary is used
|
||||
if not passed. Value of 'family' is used to fill 'family'.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of subset document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
data["family"] = family
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"schema": CURRENT_SUBSET_SCHEMA,
|
||||
"type": "subset",
|
||||
"name": name,
|
||||
"data": data,
|
||||
"parent": asset_id
|
||||
}
|
||||
|
||||
|
||||
def new_version_doc(version, subset_id, data=None, entity_id=None):
|
||||
"""Create skeleton data of version document.
|
||||
|
||||
Args:
|
||||
version (int): Is considered as unique identifier of version
|
||||
under subset.
|
||||
subset_id (Union[str, ObjectId]): Id of parent subset.
|
||||
data (Dict[str, Any]): Version document data.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of version document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"schema": CURRENT_VERSION_SCHEMA,
|
||||
"type": "version",
|
||||
"name": int(version),
|
||||
"parent": subset_id,
|
||||
"data": data
|
||||
}
|
||||
|
||||
|
||||
def new_representation_doc(
|
||||
name, version_id, context, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of asset document.
|
||||
|
||||
Args:
|
||||
version (int): Is considered as unique identifier of version
|
||||
under subset.
|
||||
version_id (Union[str, ObjectId]): Id of parent version.
|
||||
context (Dict[str, Any]): Representation context used for fill template
|
||||
of to query.
|
||||
data (Dict[str, Any]): Representation document data.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of version document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"schema": CURRENT_REPRESENTATION_SCHEMA,
|
||||
"type": "representation",
|
||||
"parent": version_id,
|
||||
"name": name,
|
||||
"data": data,
|
||||
|
||||
# Imprint shortcut to context for performance reasons.
|
||||
"context": context
|
||||
}
|
||||
|
||||
|
||||
def new_thumbnail_doc(data=None, entity_id=None):
|
||||
"""Create skeleton data of thumbnail document.
|
||||
|
||||
Args:
|
||||
data (Dict[str, Any]): Thumbnail document data.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of thumbnail document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"type": "thumbnail",
|
||||
"schema": CURRENT_THUMBNAIL_SCHEMA,
|
||||
"data": data
|
||||
}
|
||||
|
||||
|
||||
def new_workfile_info_doc(
|
||||
filename, asset_id, task_name, files, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of workfile info document.
|
||||
|
||||
Workfile document is at this moment used primarily for artist notes.
|
||||
|
||||
Args:
|
||||
filename (str): Filename of workfile.
|
||||
asset_id (Union[str, ObjectId]): Id of asset under which workfile live.
|
||||
task_name (str): Task under which was workfile created.
|
||||
files (List[str]): List of rootless filepaths related to workfile.
|
||||
data (Dict[str, Any]): Additional metadata.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of workfile info document.
|
||||
"""
|
||||
|
||||
if not data:
|
||||
data = {}
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_mongo_id(entity_id),
|
||||
"type": "workfile",
|
||||
"parent": ObjectId(asset_id),
|
||||
"task_name": task_name,
|
||||
"filename": filename,
|
||||
"data": data,
|
||||
"files": files
|
||||
}
|
||||
|
||||
|
||||
def _prepare_update_data(old_doc, new_doc, replace):
|
||||
changes = {}
|
||||
for key, value in new_doc.items():
|
||||
if key not in old_doc or value != old_doc[key]:
|
||||
changes[key] = value
|
||||
|
||||
if replace:
|
||||
for key in old_doc.keys():
|
||||
if key not in new_doc:
|
||||
changes[key] = REMOVED_VALUE
|
||||
return changes
|
||||
|
||||
|
||||
def prepare_subset_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two subset documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for 'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
def prepare_version_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two version documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for 'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
def prepare_representation_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two representation documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for 'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
|
||||
"""Compare two workfile info documents and prepare update data.
|
||||
|
||||
Based on compared values will create update data for 'UpdateOperation'.
|
||||
|
||||
Empty output means that documents are identical.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Changes between old and new document.
|
||||
"""
|
||||
|
||||
return _prepare_update_data(old_doc, new_doc, replace)
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
class AbstractOperation(object):
    """Base operation class.

    An operation represents a call into the database. The call can create,
    change or remove data.

    Args:
        project_name (str): Project on which the operation will happen.
        entity_type (str): Type of entity on which the change happens,
            e.g. 'asset', 'representation' etc.
    """

    def __init__(self, project_name, entity_type):
        self._project_name = project_name
        self._entity_type = entity_type
        self._id = str(uuid.uuid4())

    @property
    def project_name(self):
        return self._project_name

    @property
    def id(self):
        """Identifier of operation."""

        return self._id

    @property
    def entity_type(self):
        return self._entity_type

    @abstractproperty
    def operation_name(self):
        """Stringified type of operation."""

        pass

    @abstractmethod
    def to_mongo_operation(self):
        """Convert operation to Mongo batch operation."""

        pass

    def to_data(self):
        """Convert operation to data that can be converted to json or others.

        Warning:
            Current state returns ObjectId objects which cannot be parsed by
            json.

        Returns:
            Dict[str, Any]: Description of operation.
        """

        return {
            "id": self._id,
            "entity_type": self.entity_type,
            "project_name": self.project_name,
            "operation": self.operation_name
        }


class CreateOperation(AbstractOperation):
    """Operation to create an entity.

    Args:
        project_name (str): Project on which the operation will happen.
        entity_type (str): Type of entity on which the change happens,
            e.g. 'asset', 'representation' etc.
        data (Dict[str, Any]): Data of entity that will be created.
    """

    operation_name = "create"

    def __init__(self, project_name, entity_type, data):
        super(CreateOperation, self).__init__(project_name, entity_type)

        if not data:
            data = {}
        else:
            data = copy.deepcopy(dict(data))

        if "_id" not in data:
            data["_id"] = ObjectId()
        else:
            data["_id"] = ObjectId(data["_id"])

        self._entity_id = data["_id"]
        self._data = data

    def __setitem__(self, key, value):
        self.set_value(key, value)

    def __getitem__(self, key):
        return self.data[key]

    def set_value(self, key, value):
        self.data[key] = value

    def get(self, key, *args, **kwargs):
        return self.data.get(key, *args, **kwargs)

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def data(self):
        return self._data

    def to_mongo_operation(self):
        return InsertOne(copy.deepcopy(self._data))

    def to_data(self):
        output = super(CreateOperation, self).to_data()
        output["data"] = copy.deepcopy(self.data)
        return output


class UpdateOperation(AbstractOperation):
    """Operation to update an entity.

    Args:
        project_name (str): Project on which the operation will happen.
        entity_type (str): Type of entity on which the change happens,
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Identifier of an entity.
        update_data (Dict[str, Any]): Key -> value changes that will be set
            in the database. If a value is set to 'REMOVED_VALUE' the key
            will be removed. Only the first level of the dictionary is
            checked (on purpose).
    """

    operation_name = "update"

    def __init__(self, project_name, entity_type, entity_id, update_data):
        super(UpdateOperation, self).__init__(project_name, entity_type)

        self._entity_id = ObjectId(entity_id)
        self._update_data = update_data

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def update_data(self):
        return self._update_data

    def to_mongo_operation(self):
        unset_data = {}
        set_data = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                unset_data[key] = None
            else:
                set_data[key] = value

        op_data = {}
        if unset_data:
            op_data["$unset"] = unset_data
        if set_data:
            op_data["$set"] = set_data

        if not op_data:
            return None

        return UpdateOne(
            {"_id": self.entity_id},
            op_data
        )

    def to_data(self):
        changes = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                value = None
            changes[key] = value

        output = super(UpdateOperation, self).to_data()
        output.update({
            "entity_id": self.entity_id,
            "changes": changes
        })
        return output


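For illustration, what 'to_mongo_operation' produces for a mixed update (ids and values below are invented):

```python
op = UpdateOperation(
    "demo_project", "subset",
    "507f1f77bcf86cd799439011",  # hypothetical entity id
    {"data": {"families": ["review"]}, "obsolete_key": REMOVED_VALUE},
)
mongo_op = op.to_mongo_operation()
# Equivalent to:
# UpdateOne(
#     {"_id": ObjectId("507f1f77bcf86cd799439011")},
#     {"$set": {"data": {"families": ["review"]}},
#      "$unset": {"obsolete_key": None}}
# )
```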
class DeleteOperation(AbstractOperation):
    """Operation to delete an entity.

    Args:
        project_name (str): Project on which the operation will happen.
        entity_type (str): Type of entity on which the change happens,
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Entity id that will be removed.
    """

    operation_name = "delete"

    def __init__(self, project_name, entity_type, entity_id):
        super(DeleteOperation, self).__init__(project_name, entity_type)

        self._entity_id = ObjectId(entity_id)

    @property
    def entity_id(self):
        return self._entity_id

    def to_mongo_operation(self):
        return DeleteOne({"_id": self.entity_id})

    def to_data(self):
        output = super(DeleteOperation, self).to_data()
        output["entity_id"] = self.entity_id
        return output


class OperationsSession(object):
    """Session storing operations that should happen in order.

    At this moment it does not handle anything special and can be considered
    a plain list of operations that will happen one after another. If the
    same entity is created multiple times it is not handled in any special
    way and document values are not validated.

    Operations may span multiple projects; on commit they are grouped and
    committed per project name.
    """

    def __init__(self):
        self._operations = []

    def add(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """
        if not isinstance(
            operation,
            (CreateOperation, UpdateOperation, DeleteOperation)
        ):
            raise TypeError("Expected Operation object, got {}".format(
                str(type(operation))
            ))

        self._operations.append(operation)

    def append(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """

        self.add(operation)

    def extend(self, operations):
        """Add operations to be processed.

        Args:
            operations (List[BaseOperation]): Operations that should be
                processed.
        """

        for operation in operations:
            self.add(operation)

    def remove(self, operation):
        """Remove operation."""

        self._operations.remove(operation)

    def clear(self):
        """Clear all registered operations."""

        self._operations = []

    def to_data(self):
        return [
            operation.to_data()
            for operation in self._operations
        ]

    def commit(self):
        """Commit session operations."""

        operations, self._operations = self._operations, []
        if not operations:
            return

        operations_by_project = collections.defaultdict(list)
        for operation in operations:
            operations_by_project[operation.project_name].append(operation)

        for project_name, project_operations in operations_by_project.items():
            bulk_writes = []
            for operation in project_operations:
                mongo_op = operation.to_mongo_operation()
                if mongo_op is not None:
                    bulk_writes.append(mongo_op)

            if bulk_writes:
                collection = get_project_connection(project_name)
                collection.bulk_write(bulk_writes)

    def create_entity(self, project_name, entity_type, data):
        """Fast access to 'CreateOperation'.

        Returns:
            CreateOperation: Object of create operation.
        """

        operation = CreateOperation(project_name, entity_type, data)
        self.add(operation)
        return operation

    def update_entity(self, project_name, entity_type, entity_id, update_data):
        """Fast access to 'UpdateOperation'.

        Returns:
            UpdateOperation: Object of update operation.
        """

        operation = UpdateOperation(
            project_name, entity_type, entity_id, update_data
        )
        self.add(operation)
        return operation

    def delete_entity(self, project_name, entity_type, entity_id):
        """Fast access to 'DeleteOperation'.

        Returns:
            DeleteOperation: Object of delete operation.
        """

        operation = DeleteOperation(project_name, entity_type, entity_id)
        self.add(operation)
        return operation
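A minimal usage sketch of the session (the project name is illustrative, and 'asset_id', 'version_id' and 'repre_id' are assumed to be existing ObjectId values):

```python
session = OperationsSession()

session.create_entity(
    "demo_project", "subset",
    {"name": "renderMain", "type": "subset", "parent": asset_id},
)
session.update_entity(
    "demo_project", "version", version_id, {"data.comment": "Approved"}
)
session.delete_entity("demo_project", "representation", repre_id)

# Operations are grouped by project and each group is sent as one
# 'bulk_write' call.
session.commit()
```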
@@ -1,11 +1,11 @@
 import os
 import shutil
-from openpype.lib import (
-    PreLaunchHook,
-    get_custom_workfile_template_by_context,
-)
-from openpype.settings import get_project_settings
+from openpype.lib import PreLaunchHook
+from openpype.settings import get_project_settings
+from openpype.pipeline.workfile import (
+    get_custom_workfile_template,
+    get_custom_workfile_template_by_string_context
+)


 class CopyTemplateWorkfile(PreLaunchHook):
@@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook):
         project_name = self.data["project_name"]
         asset_name = self.data["asset_name"]
         task_name = self.data["task_name"]
+        host_name = self.application.host_name

         project_settings = get_project_settings(project_name)
-        host_settings = project_settings[self.application.host_name]
-
-        workfile_builder_settings = host_settings.get("workfile_builder")
-        if not workfile_builder_settings:
-            # TODO remove warning when deprecated
-            self.log.warning((
-                "Seems like old version of settings is used."
-                " Can't access custom templates in host \"{}\"."
-            ).format(self.application.full_label))
-            return
-
-        if not workfile_builder_settings["create_first_version"]:
-            self.log.info((
-                "Project \"{}\" has turned off to create first workfile for"
-                " application \"{}\""
-            ).format(project_name, self.application.full_label))
-            return
-
-        # Backwards compatibility
-        template_profiles = workfile_builder_settings.get("custom_templates")
-        if not template_profiles:
-            self.log.info(
-                "Custom templates are not filled. Skipping template copy."
-            )
-            return

         project_doc = self.data.get("project_doc")
         asset_doc = self.data.get("asset_doc")
         anatomy = self.data.get("anatomy")
         if project_doc and asset_doc:
             self.log.debug("Started filtering of custom template paths.")
-            template_path = get_custom_workfile_template_by_context(
-                template_profiles, project_doc, asset_doc, task_name, anatomy
+            template_path = get_custom_workfile_template(
+                project_doc,
+                asset_doc,
+                task_name,
+                host_name,
+                anatomy,
+                project_settings
             )

         else:
@@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook):
                 "Global data collection probably did not execute."
                 " Using backup solution."
             ))
-            dbcon = self.data.get("dbcon")
             template_path = get_custom_workfile_template_by_string_context(
-                template_profiles, project_name, asset_name, task_name,
-                dbcon, anatomy
+                project_name,
+                asset_name,
+                task_name,
+                host_name,
+                anatomy,
+                project_settings
             )

         if not template_path:
@@ -1,12 +1,11 @@
-from openpype.api import Anatomy
+from openpype.client import get_project, get_asset_by_name
 from openpype.lib import (
     PreLaunchHook,
     EnvironmentPrepData,
     prepare_app_environments,
     prepare_context_environments
 )
-
-import avalon.api
+from openpype.pipeline import AvalonMongoDB, Anatomy


 class GlobalHostDataHook(PreLaunchHook):
@@ -64,14 +63,14 @@ class GlobalHostDataHook(PreLaunchHook):
         self.data["anatomy"] = Anatomy(project_name)

         # Mongo connection
-        dbcon = avalon.api.AvalonMongoDB()
+        dbcon = AvalonMongoDB()
         dbcon.Session["AVALON_PROJECT"] = project_name
         dbcon.install()

         self.data["dbcon"] = dbcon

         # Project document
-        project_doc = dbcon.find_one({"type": "project"})
+        project_doc = get_project(project_name)
         self.data["project_doc"] = project_doc

         asset_name = self.data.get("asset_name")
@@ -81,8 +80,5 @@ class GlobalHostDataHook(PreLaunchHook):
             )
             return

-        asset_doc = dbcon.find_one({
-            "type": "asset",
-            "name": asset_name
-        })
+        asset_doc = get_asset_by_name(project_name, asset_name)
         self.data["asset_doc"] = asset_doc
13 openpype/host/__init__.py Normal file
@@ -0,0 +1,13 @@
from .host import (
    HostBase,
    IWorkfileHost,
    ILoadHost,
    INewPublisher,
)

__all__ = (
    "HostBase",
    "IWorkfileHost",
    "ILoadHost",
    "INewPublisher",
)
531 openpype/host/host.py Normal file
@@ -0,0 +1,531 @@
import logging
import contextlib
from abc import ABCMeta, abstractproperty, abstractmethod

import six

# NOTE can't import 'typing' because of issues in Maya 2020
# - shiboken crashes on 'typing' module import


class MissingMethodsError(ValueError):
    """Exception raised when a host is missing methods required for a
    specific workflow.

    Args:
        host (HostBase): Host implementation where the methods are missing.
        missing_methods (list[str]): List of missing methods.
    """

    def __init__(self, host, missing_methods):
        joined_missing = ", ".join(
            ['"{}"'.format(item) for item in missing_methods]
        )
        if isinstance(host, HostBase):
            host_name = host.name
        else:
            try:
                host_name = host.__file__.replace("\\", "/").split("/")[-3]
            except Exception:
                host_name = str(host)
        message = (
            "Host \"{}\" is missing methods {}".format(
                host_name, joined_missing
            )
        )
        super(MissingMethodsError, self).__init__(message)


@six.add_metaclass(ABCMeta)
class HostBase(object):
    """Base of host implementation class.

    Host is the pipeline implementation of a DCC application. This class
    should help to identify what must/should/can be implemented for specific
    functionality.

    Compared to the 'avalon' concept: what used to be loose functions in a
    host implementation folder is now a class. The host implementation
    should primarily care about adding the ability of creation (marking
    subsets to be published) and optionally about referencing published
    representations as containers.

    A host may need to extend some functionality, like working with
    workfiles or loading. Not all host implementations may allow that; for
    those purposes the logic can be extended by implementing functions for
    the purpose. There are prepared interfaces to identify what must be
    implemented to be able to use that functionality.
    - the current statement is that it is not required to inherit from the
      interfaces, but all of the methods are validated (only their
      existence!)

    # Installation of host before (avalon concept):
    ```python
    from openpype.pipeline import install_host
    import openpype.hosts.maya.api as host

    install_host(host)
    ```

    # Installation of host now:
    ```python
    from openpype.pipeline import install_host
    from openpype.hosts.maya.api import MayaHost

    host = MayaHost()
    install_host(host)
    ```

    Todo:
        - move content of 'install_host' as method of this class
            - register host object
            - install legacy_io
            - install global plugin paths
        - store registered plugin paths to this object
        - handle current context (project, asset, task)
            - this must be done in many separated steps
        - have its own object of host tools instead of using globals

    This implementation will probably change over time when more
    functionality and responsibility will be added.
    """
    _log = None

    def __init__(self):
        """Initialization of host.

        Register DCC callbacks, host specific plugin paths, targets etc.
        (Part of what 'install' did in the 'avalon' concept.)

        Note:
            At this moment the global "installation" must happen before host
            installation. Because of this current limitation it is
            recommended to implement an 'install' method which is triggered
            after the global 'install'.
        """

        pass

    @property
    def log(self):
        if self._log is None:
            self._log = logging.getLogger(self.__class__.__name__)
        return self._log

    @abstractproperty
    def name(self):
        """Host name."""

        pass

    def get_current_context(self):
        """Get current context information.

        This method should be used to get the current context of the host.
        Usage of this method can be crucial for host implementations in DCCs
        where multiple workfiles can be opened at one moment and a change of
        context can't be caught properly.

        Default implementation returns values from 'legacy_io.Session'.

        Returns:
            dict: Context with 3 keys 'project_name', 'asset_name' and
                'task_name'. All of them can be 'None'.
        """

        from openpype.pipeline import legacy_io

        if not legacy_io.is_installed():
            legacy_io.install()

        return {
            "project_name": legacy_io.Session["AVALON_PROJECT"],
            "asset_name": legacy_io.Session["AVALON_ASSET"],
            "task_name": legacy_io.Session["AVALON_TASK"]
        }

    def get_context_title(self):
        """Context title shown for UI purposes.

        Should return current context title if possible.

        Note:
            This method is used only for UI purposes, so it is possible to
            return some logical title for contextless cases.
            It is not meant for a "Context menu" label.

        Returns:
            str: Context title.
            None: Default title is used based on UI implementation.
        """

        # Use current context to fill the context title
        current_context = self.get_current_context()
        project_name = current_context["project_name"]
        asset_name = current_context["asset_name"]
        task_name = current_context["task_name"]
        items = []
        if project_name:
            items.append(project_name)
            if asset_name:
                items.append(asset_name)
                if task_name:
                    items.append(task_name)
        if items:
            return "/".join(items)
        return None

    @contextlib.contextmanager
    def maintained_selection(self):
        """Run a block of functionality while keeping the selection intact.

        This is DCC specific. Some hosts may not allow implementing this
        ability; that is the reason why the default implementation is an
        empty context manager.

        Yields:
            None: Yield when ready; selection is restored at the end.
        """

        try:
            yield
        finally:
            pass


class ILoadHost:
    """Implementation requirements to be able to use loading of
    representations.

    The load plugins can do referencing even without implementation of the
    methods here, but switching and removal of containers would not be
    possible.

    Questions:
        - Is the container list a dependency of the host or of load plugins?
        - Should this be directly in HostBase?
            - how to find out if referencing is available?
            - do we need to know that?
    """

    @staticmethod
    def get_missing_load_methods(host):
        """Look for missing methods on an "old type" host implementation.

        Method is used for validation of implemented functions related to
        loading. Checks only the existence of methods.

        Args:
            host (Union[ModuleType, HostBase]): Object of host where to look
                for required methods.

        Returns:
            list[str]: Missing method implementations for loading workflow.
        """

        if isinstance(host, ILoadHost):
            return []

        required = ["ls"]
        missing = []
        for name in required:
            if not hasattr(host, name):
                missing.append(name)
        return missing

    @staticmethod
    def validate_load_methods(host):
        """Validate implemented methods of an "old type" host for the load
        workflow.

        Args:
            host (Union[ModuleType, HostBase]): Object of host to validate.

        Raises:
            MissingMethodsError: If there are missing methods on the host
                implementation.
        """
        missing = ILoadHost.get_missing_load_methods(host)
        if missing:
            raise MissingMethodsError(host, missing)

    @abstractmethod
    def get_containers(self):
        """Retrieve referenced containers from scene.

        This can be implemented in hosts where referencing can be used.

        Returns:
            list[dict]: Information about loaded containers.
        """

        pass

    # --- Deprecated method names ---
    def ls(self):
        """Deprecated variant of 'get_containers'.

        Todo:
            Remove when all usages are replaced.
        """

        return self.get_containers()

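A short sketch of how these validation helpers are meant to be used (the module names are illustrative):

```python
import types

# Hypothetical "old type" host module that implements only 'ls'.
legacy_host = types.ModuleType("legacy_host")
legacy_host.ls = lambda: []

assert ILoadHost.get_missing_load_methods(legacy_host) == []

# A module without 'ls' fails validation with MissingMethodsError.
empty_host = types.ModuleType("empty_host")
try:
    ILoadHost.validate_load_methods(empty_host)
except MissingMethodsError as exc:
    # e.g. Host "<module 'empty_host'>" is missing methods "ls"
    print(exc)
```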
@six.add_metaclass(ABCMeta)
class IWorkfileHost:
    """Implementation requirements to be able to use workfile utils and
    tool."""

    @staticmethod
    def get_missing_workfile_methods(host):
        """Look for missing methods on an "old type" host implementation.

        Method is used for validation of implemented functions related to
        workfiles. Checks only the existence of methods.

        Args:
            host (Union[ModuleType, HostBase]): Object of host where to look
                for required methods.

        Returns:
            list[str]: Missing method implementations for workfiles
                workflow.
        """

        if isinstance(host, IWorkfileHost):
            return []

        required = [
            "open_file",
            "save_file",
            "current_file",
            "has_unsaved_changes",
            "file_extensions",
            "work_root",
        ]
        missing = []
        for name in required:
            if not hasattr(host, name):
                missing.append(name)
        return missing

    @staticmethod
    def validate_workfile_methods(host):
        """Validate methods of an "old type" host for workfiles workflow.

        Args:
            host (Union[ModuleType, HostBase]): Object of host to validate.

        Raises:
            MissingMethodsError: If there are missing methods on the host
                implementation.
        """

        missing = IWorkfileHost.get_missing_workfile_methods(host)
        if missing:
            raise MissingMethodsError(host, missing)

    @abstractmethod
    def get_workfile_extensions(self):
        """Extensions that can be used for saving.

        Questions:
            This could potentially use 'HostDefinition'.
        """

        return []

    @abstractmethod
    def save_workfile(self, dst_path=None):
        """Save currently opened scene.

        Args:
            dst_path (str): Where the current scene should be saved. Or use
                the current path if 'None' is passed.
        """

        pass

    @abstractmethod
    def open_workfile(self, filepath):
        """Open passed filepath in the host.

        Args:
            filepath (str): Path to workfile.
        """

        pass

    @abstractmethod
    def get_current_workfile(self):
        """Retrieve path to the currently opened file.

        Returns:
            str: Path to file which is currently opened.
            None: If nothing is opened.
        """

        return None

    def workfile_has_unsaved_changes(self):
        """Tell whether the currently opened scene has unsaved changes.

        Not all hosts can know if the current scene is saved because the API
        of the DCC does not support it.

        Returns:
            bool: True if the scene has unsaved modifications, False if it
                is saved.
            None: Can't tell if the workfile has modifications.
        """

        return None

    def work_root(self, session):
        """Modify workdir per host.

        Default implementation keeps workdir untouched.

        Warnings:
            We must handle this modification in a more sophisticated way
            because this can't be called outside the DCC, so opening of the
            last workfile (calculated before the DCC is launched) is
            complicated. Also, breaking the defined work template is not a
            good idea.
            The only place where it's really used and makes sense is Maya.
            There workspace.mel can modify subfolders where to look for maya
            files.

        Args:
            session (dict): Session context data.

        Returns:
            str: Path to new workdir.
        """

        return session["AVALON_WORKDIR"]

    # --- Deprecated method names ---
    def file_extensions(self):
        """Deprecated variant of 'get_workfile_extensions'.

        Todo:
            Remove when all usages are replaced.
        """
        return self.get_workfile_extensions()

    def save_file(self, dst_path=None):
        """Deprecated variant of 'save_workfile'.

        Todo:
            Remove when all usages are replaced.
        """

        self.save_workfile(dst_path)

    def open_file(self, filepath):
        """Deprecated variant of 'open_workfile'.

        Todo:
            Remove when all usages are replaced.
        """

        return self.open_workfile(filepath)

    def current_file(self):
        """Deprecated variant of 'get_current_workfile'.

        Todo:
            Remove when all usages are replaced.
        """

        return self.get_current_workfile()

    def has_unsaved_changes(self):
        """Deprecated variant of 'workfile_has_unsaved_changes'.

        Todo:
            Remove when all usages are replaced.
        """

        return self.workfile_has_unsaved_changes()

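A minimal sketch of a class satisfying both interfaces (the host name, extension and path handling are invented):

```python
class MyHost(HostBase, IWorkfileHost):
    name = "myhost"

    def __init__(self):
        super(MyHost, self).__init__()
        self._current_path = None

    def get_workfile_extensions(self):
        return [".myw"]

    def save_workfile(self, dst_path=None):
        # A real implementation would call the DCC save API here.
        self._current_path = dst_path or self._current_path

    def open_workfile(self, filepath):
        # A real implementation would call the DCC open API here.
        self._current_path = filepath

    def get_current_workfile(self):
        return self._current_path
```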
class INewPublisher:
    """Functions related to the new creation system in the new publisher.

    The new publisher stores not only information about each created
    instance but also some global data. At this moment the data are related
    only to context publish plugins, but that can be extended in the future.
    """

    @staticmethod
    def get_missing_publish_methods(host):
        """Look for missing methods on an "old type" host implementation.

        Method is used for validation of implemented functions related to
        new publish creation. Checks only the existence of methods.

        Args:
            host (Union[ModuleType, HostBase]): Host module where to look
                for required methods.

        Returns:
            list[str]: Missing method implementations for the new publisher
                workflow.
        """

        if isinstance(host, INewPublisher):
            return []

        required = [
            "get_context_data",
            "update_context_data",
        ]
        missing = []
        for name in required:
            if not hasattr(host, name):
                missing.append(name)
        return missing

    @staticmethod
    def validate_publish_methods(host):
        """Validate implemented methods of an "old type" host.

        Args:
            host (Union[ModuleType, HostBase]): Host module to validate.

        Raises:
            MissingMethodsError: If there are missing methods on the host
                implementation.
        """
        missing = INewPublisher.get_missing_publish_methods(host)
        if missing:
            raise MissingMethodsError(host, missing)

    @abstractmethod
    def get_context_data(self):
        """Get global data related to creation-publishing from workfile.

        These data are not related to any created instance but to the whole
        publishing context. If they are not saved and returned, each reset
        of publishing resets all values to the defaults.

        Context data can contain information about enabled/disabled publish
        plugins or other values that can be filled by the artist.

        Returns:
            dict: Context data stored using 'update_context_data'.
        """

        pass

    @abstractmethod
    def update_context_data(self, data, changes):
        """Store global context data to workfile.

        Called when some values in the context data have changed.

        If the values are not stored in a way that 'get_context_data' can
        return them, each reset of publishing will lose the values the
        artist filled in. Best practice is to store the values into the
        workfile, if possible.

        Args:
            data (dict): New data as a whole.
            changes (dict): Only the data that changed. Each value is a
                tuple of '(<old>, <new>)'.
        """

        pass
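A minimal sketch of a host implementing this interface by persisting the data into a JSON sidecar file (the storage location is invented; real hosts keep this in workfile metadata):

```python
import json
import os


class MyPublishHost(INewPublisher):
    """Stores publish context data in a JSON file."""

    _data_path = os.path.expanduser("~/publish_context.json")

    def get_context_data(self):
        if os.path.exists(self._data_path):
            with open(self._data_path, "r") as stream:
                return json.load(stream)
        return {}

    def update_context_data(self, data, changes):
        # The (old, new) pairs in 'changes' are not needed here;
        # the whole 'data' dict is written out.
        with open(self._data_path, "w") as stream:
            json.dump(data, stream)
```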
@@ -1,9 +1,6 @@
-def add_implementation_envs(env, _app):
-    """Modify environments to contain all required for implementation."""
-    defaults = {
-        "OPENPYPE_LOG_NO_COLORS": "True",
-        "WEBSOCKET_URL": "ws://localhost:8097/ws/"
-    }
-    for key, value in defaults.items():
-        if not env.get(key):
-            env[key] = value
+from .addon import AfterEffectsAddon
+
+
+__all__ = (
+    "AfterEffectsAddon",
+)
23 openpype/hosts/aftereffects/addon.py Normal file
@@ -0,0 +1,23 @@
from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostAddon


class AfterEffectsAddon(OpenPypeModule, IHostAddon):
    name = "aftereffects"
    host_name = "aftereffects"

    def initialize(self, module_settings):
        self.enabled = True

    def add_implementation_envs(self, env, _app):
        """Modify environments to contain all required for implementation."""
        defaults = {
            "OPENPYPE_LOG_NO_COLORS": "True",
            "WEBSOCKET_URL": "ws://localhost:8097/ws/"
        }
        for key, value in defaults.items():
            if not env.get(key):
                env[key] = value

    def get_workfile_extensions(self):
        return [".aep"]
@@ -16,7 +16,10 @@ from .pipeline import (
     uninstall,
     list_instances,
     remove_instance,
-    containerise
+    containerise,
+    get_context_data,
+    update_context_data,
+    get_context_title
 )

 from .workio import (
@@ -51,6 +54,9 @@ __all__ = [
     "list_instances",
     "remove_instance",
     "containerise",
+    "get_context_data",
+    "update_context_data",
+    "get_context_title",

     "file_extensions",
     "has_unsaved_changes",

Binary file not shown.
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<ExtensionManifest Version="8.0" ExtensionBundleId="com.openpype.AE.panel" ExtensionBundleVersion="1.0.22"
+<ExtensionManifest Version="8.0" ExtensionBundleId="com.openpype.AE.panel" ExtensionBundleVersion="1.0.23"
 ExtensionBundleName="openpype" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
 <ExtensionList>
 <Extension Id="com.openpype.AE.panel" Version="1.0" />
@@ -417,7 +417,9 @@ function getRenderInfo(){
     var file_url = item.file.toString();

     return JSON.stringify({
-        "file_name": file_url
+        "file_name": file_url,
+        "width": render_item.comp.width,
+        "height": render_item.comp.height
     })
 }
@@ -12,9 +12,8 @@ from wsrpc_aiohttp import (

 from Qt import QtCore

+from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools
-
-from avalon import api
 from openpype.tools.adobe_webserver.app import WebServerTool

 from .ws_stub import AfterEffectsServerStub
@@ -271,13 +270,13 @@ class AfterEffectsRoute(WebSocketRoute):
         log.info("Setting context change")
         log.info("project {} asset {} ".format(project, asset))
         if project:
-            api.Session["AVALON_PROJECT"] = project
+            legacy_io.Session["AVALON_PROJECT"] = project
             os.environ["AVALON_PROJECT"] = project
         if asset:
-            api.Session["AVALON_ASSET"] = asset
+            legacy_io.Session["AVALON_ASSET"] = asset
             os.environ["AVALON_ASSET"] = asset
         if task:
-            api.Session["AVALON_TASK"] = task
+            legacy_io.Session["AVALON_TASK"] = task
             os.environ["AVALON_TASK"] = task

     async def read(self):
@@ -1,12 +1,16 @@
 import os
 import sys
+import re
+import json
 import contextlib
 import traceback
 import logging
+from functools import partial

 from Qt import QtWidgets

-from openpype.lib.remote_publish import headless_publish
+from openpype.pipeline import install_host
+from openpype.modules import ModulesManager

 from openpype.tools.utils import host_tools
 from .launch_logic import ProcessLauncher, get_stub
@@ -22,10 +26,9 @@ def safe_excepthook(*args):
 def main(*subprocess_args):
     sys.excepthook = safe_excepthook

-    import avalon.api
     from openpype.hosts.aftereffects import api

-    avalon.api.install(api)
+    install_host(api)

     os.environ["OPENPYPE_LOG_NO_COLORS"] = "False"
     app = QtWidgets.QApplication([])
@@ -35,10 +38,18 @@ def main(*subprocess_args):
     launcher.start()

     if os.environ.get("HEADLESS_PUBLISH"):
-        launcher.execute_in_main_thread(lambda: headless_publish(
-            log,
-            "CloseAE",
-            os.environ.get("IS_TEST")))
+        manager = ModulesManager()
+        webpublisher_addon = manager["webpublisher"]
+
+        launcher.execute_in_main_thread(
+            partial(
+                webpublisher_addon.headless_publish,
+                log,
+                "CloseAE",
+                os.environ.get("IS_TEST")
+            )
+        )

     elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
         save = False
         if os.getenv("WORKFILES_SAVE_AS"):
@@ -68,3 +79,57 @@ def get_extension_manifest_path():
         "CSXS",
         "manifest.xml"
     )
+
+
+def get_unique_layer_name(layers, name):
+    """Get a unique layer name.
+
+    Checks all layer names and, if 'name' is present among them, increases
+    the suffix by 1 (eg. creates a unique layer name - for Loader).
+
+    Args:
+        layers (list): of strings, names only
+        name (string): checked value
+
+    Returns:
+        (string): name_00X (without version)
+    """
+    names = {}
+    for layer in layers:
+        layer_name = re.sub(r'_\d{3}$', '', layer)
+        if layer_name in names.keys():
+            names[layer_name] = names[layer_name] + 1
+        else:
+            names[layer_name] = 1
+    occurrences = names.get(name, 0)
+
+    return "{}_{:0>3d}".format(name, occurrences + 1)
+
+
+def get_background_layers(file_url):
+    """Pull file names from a background json file, enriched with the
+    folder url so AE is able to import the files.
+
+    Order is important; it follows the order in the json.
+
+    Args:
+        file_url (str): abs url of background json
+
+    Returns:
+        (list): of abs paths to images
+    """
+    with open(file_url) as json_file:
+        data = json.load(json_file)
+
+    layers = list()
+    bg_folder = os.path.dirname(file_url)
+    for child in data['children']:
+        if child.get("filename"):
+            layers.append(os.path.join(bg_folder, child.get("filename")).
+                          replace("\\", "/"))
+        else:
+            for layer in child['children']:
+                if layer.get("filename"):
+                    layers.append(os.path.join(bg_folder,
+                                               layer.get("filename")).
+                                  replace("\\", "/"))
+    return layers
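A small usage sketch of the two helpers above (the json shape is inferred from the code, not from a documented schema):

```python
layers = ["bg_001", "bg_002", "hero_001"]
print(get_unique_layer_name(layers, "bg"))    # -> "bg_003"
print(get_unique_layer_name(layers, "tree"))  # -> "tree_001"

# get_background_layers expects a json roughly like:
# {"children": [
#     {"filename": "bg.png"},
#     {"children": [{"filename": "clouds.png"}]}
# ]}
# and returns absolute, forward-slash paths next to the json file.
```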
@@ -1,11 +1,8 @@
 import os
-import sys

 from Qt import QtWidgets
-from bson.objectid import ObjectId

 import pyblish.api
-from avalon import io

 from openpype import lib
 from openpype.api import Logger
@@ -15,7 +12,9 @@ from openpype.pipeline import (
     deregister_loader_plugin_path,
     deregister_creator_plugin_path,
     AVALON_CONTAINER_ID,
+    legacy_io,
 )
+from openpype.pipeline.load import any_outdated_containers
 import openpype.hosts.aftereffects
 from openpype.lib import register_event_callback
@@ -33,39 +32,6 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")


-def check_inventory():
-    if not lib.any_outdated():
-        return
-
-    host = pyblish.api.registered_host()
-    outdated_containers = []
-    for container in host.ls():
-        representation = container['representation']
-        representation_doc = io.find_one(
-            {
-                "_id": ObjectId(representation),
-                "type": "representation"
-            },
-            projection={"parent": True}
-        )
-        if representation_doc and not lib.is_latest(representation_doc):
-            outdated_containers.append(container)
-
-    # Warn about outdated containers.
-    print("Starting new QApplication..")
-    app = QtWidgets.QApplication(sys.argv)
-
-    message_box = QtWidgets.QMessageBox()
-    message_box.setIcon(QtWidgets.QMessageBox.Warning)
-    msg = "There are outdated containers in the scene."
-    message_box.setText(msg)
-    message_box.exec_()
-
-
-def application_launch():
-    check_inventory()
-
-
 def install():
     print("Installing Pype config...")
@@ -89,19 +55,24 @@ def uninstall():
     deregister_creator_plugin_path(CREATE_PATH)


+def application_launch():
+    """Triggered after start of app"""
+    check_inventory()
+
+
 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle layer visibility on instance toggles."""
     instance[0].Visible = new_value


-def get_asset_settings():
+def get_asset_settings(asset_doc):
     """Get settings on current asset from database.

     Returns:
         dict: Scene data.

     """
-    asset_data = lib.get_asset()["data"]
+    asset_data = asset_doc["data"]
     fps = asset_data.get("fps")
     frame_start = asset_data.get("frameStart")
     frame_end = asset_data.get("frameEnd")
@@ -123,65 +94,6 @@ def get_asset_settings(asset_doc):
     }


-def containerise(name,
-                 namespace,
-                 comp,
-                 context,
-                 loader=None,
-                 suffix="_CON"):
-    """
-    Containerisation enables tracking of version, author and origin
-    for loaded assets.
-
-    Creates dictionary payloads that get saved into file metadata. Each
-    container records who loaded it (loader) and its members (single or
-    multiple in case of background).
-
-    Arguments:
-        name (str): Name of resulting assembly
-        namespace (str): Namespace under which to host container
-        comp (Comp): Composition to containerise
-        context (dict): Asset information
-        loader (str, optional): Name of loader used to produce this
-            container.
-        suffix (str, optional): Suffix of container, defaults to `_CON`.
-
-    Returns:
-        container (str): Name of container assembly
-    """
-    data = {
-        "schema": "openpype:container-2.0",
-        "id": AVALON_CONTAINER_ID,
-        "name": name,
-        "namespace": namespace,
-        "loader": str(loader),
-        "representation": str(context["representation"]["_id"]),
-        "members": comp.members or [comp.id]
-    }
-
-    stub = get_stub()
-    stub.imprint(comp, data)
-
-    return comp
-
-
-def _get_stub():
-    """
-    Handle pulling the stub from AE to run operations on the host
-    Returns:
-        (AEServerStub) or None
-    """
-    try:
-        stub = get_stub()  # only after AfterEffects is up
-    except lib.ConnectionNotEstablishedYet:
-        print("Not connected yet, ignoring")
-        return
-
-    if not stub.get_active_document_name():
-        return
-
-    return stub
-
-
 def ls():
     """Yields containers from active AfterEffects document.
@@ -222,6 +134,66 @@ def ls():
         yield data


+def check_inventory():
+    """Checks loaded containers to see if they are the highest version."""
+    if not any_outdated_containers():
+        return
+
+    # Warn about outdated containers.
+    _app = QtWidgets.QApplication.instance()
+    if not _app:
+        print("Starting new QApplication..")
+        _app = QtWidgets.QApplication([])
+
+    message_box = QtWidgets.QMessageBox()
+    message_box.setIcon(QtWidgets.QMessageBox.Warning)
+    msg = "There are outdated containers in the scene."
+    message_box.setText(msg)
+    message_box.exec_()
+
+
+def containerise(name,
+                 namespace,
+                 comp,
+                 context,
+                 loader=None,
+                 suffix="_CON"):
+    """
+    Containerisation enables tracking of version, author and origin
+    for loaded assets.
+
+    Creates dictionary payloads that get saved into file metadata. Each
+    container records who loaded it (loader) and its members (single or
+    multiple in case of background).
+
+    Arguments:
+        name (str): Name of resulting assembly
+        namespace (str): Namespace under which to host container
+        comp (AEItem): Composition to containerise
+        context (dict): Asset information
+        loader (str, optional): Name of loader used to produce this
+            container.
+        suffix (str, optional): Suffix of container, defaults to `_CON`.
+
+    Returns:
+        container (str): Name of container assembly
+    """
+    data = {
+        "schema": "openpype:container-2.0",
+        "id": AVALON_CONTAINER_ID,
+        "name": name,
+        "namespace": namespace,
+        "loader": str(loader),
+        "representation": str(context["representation"]["_id"]),
+        "members": comp.members or [comp.id]
+    }
+
+    stub = get_stub()
+    stub.imprint(comp.id, data)
+
+    return comp
+
+
+# created instances section
 def list_instances():
     """
     List all created instances from current workfile which
@@ -242,16 +214,8 @@ def list_instances():
     layers_meta = stub.get_metadata()

     for instance in layers_meta:
-        if instance.get("schema") and \
-                "container" in instance.get("schema"):
-            continue
-
-        uuid_val = instance.get("uuid")
-        if uuid_val:
-            instance['uuid'] = uuid_val
-        else:
-            instance['uuid'] = instance.get("members")[0]  # legacy
-        instances.append(instance)
+        if instance.get("id") == "pyblish.avalon.instance":
+            instances.append(instance)
     return instances

@@ -272,8 +236,59 @@ def remove_instance(instance):
     if not stub:
         return

-    stub.remove_instance(instance.get("uuid"))
-    item = stub.get_item(instance.get("uuid"))
-    if item:
-        stub.rename_item(item.id,
-                         item.name.replace(stub.PUBLISH_ICON, ''))
+    inst_id = instance.get("instance_id") or instance.get("uuid")  # legacy
+    if not inst_id:
+        log.warning("No instance identifier for {}".format(instance))
+        return
+
+    stub.remove_instance(inst_id)
+
+    if instance.get("members"):
+        item = stub.get_item(instance["members"][0])
+        if item:
+            stub.rename_item(item.id,
+                             item.name.replace(stub.PUBLISH_ICON, ''))
+
+
+# new publisher section
+def get_context_data():
+    meta = _get_stub().get_metadata()
+    for item in meta:
+        if item.get("id") == "publish_context":
+            item.pop("id")
+            return item
+
+    return {}
+
+
+def update_context_data(data, changes):
+    item = data
+    item["id"] = "publish_context"
+    _get_stub().imprint(item["id"], item)
+
+
+def get_context_title():
+    """Returns title for Creator window"""
+
+    project_name = legacy_io.Session["AVALON_PROJECT"]
+    asset_name = legacy_io.Session["AVALON_ASSET"]
+    task_name = legacy_io.Session["AVALON_TASK"]
+    return "{}/{}/{}".format(project_name, asset_name, task_name)
+
+
+def _get_stub():
+    """
+    Handle pulling the stub from AE to run operations on the host
+    Returns:
+        (AEServerStub) or None
+    """
+    try:
+        stub = get_stub()  # only after AfterEffects is up
+    except lib.ConnectionNotEstablishedYet:
+        print("Not connected yet, ignoring")
+        return
+
+    if not stub.get_active_document_name():
+        return
+
+    return stub
@@ -1,12 +1,11 @@
 """Host API required Work Files tool"""
 import os

-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
 from .launch_logic import get_stub


 def file_extensions():
-    return HOST_WORKFILE_EXTENSIONS["aftereffects"]
+    return [".aep"]


 def has_unsaved_changes():
@@ -51,4 +50,4 @@ def _active_document():
         print("Nothing opened")
         pass

-    return document_name
+    return document_name
@@ -28,6 +28,9 @@ class AEItem(object):
     workAreaDuration = attr.ib(default=None)
     frameRate = attr.ib(default=None)
     file_name = attr.ib(default=None)
+    instance_id = attr.ib(default=None)  # New Publisher
+    width = attr.ib(default=None)
+    height = attr.ib(default=None)


 class AfterEffectsServerStub():
@@ -110,11 +113,11 @@ class AfterEffectsServerStub():

         self.log.debug("Couldn't find layer metadata")

-    def imprint(self, item, data, all_items=None, items_meta=None):
+    def imprint(self, item_id, data, all_items=None, items_meta=None):
         """
         Save item metadata to Label field of metadata of active document
         Args:
-            item (AEItem):
+            item_id (int|str): id of FootageItem or instance_id for workfiles
             data(string): json representation for single layer
             all_items (list of item): for performance, could be
                 injected for usage in loop, if not, single call will be
@@ -132,8 +135,9 @@ class AfterEffectsServerStub():
             is_new = True

             for item_meta in items_meta:
-                if item_meta.get('members') \
-                        and str(item.id) == str(item_meta.get('members')[0]):
+                if ((item_meta.get('members') and
+                        str(item_id) == str(item_meta.get('members')[0])) or
+                        item_meta.get("instance_id") == item_id):
                     is_new = False
                     if data:
                         item_meta.update(data)
@@ -153,10 +157,12 @@ class AfterEffectsServerStub():
         item_ids = [int(item.id) for item in all_items]
         cleaned_data = []
         for meta in result_meta:
-            # for creation of instance OR loaded container
-            if 'instance' in meta.get('id') or \
-                    int(meta.get('members')[0]) in item_ids:
-                cleaned_data.append(meta)
+            # do not add instance with nonexistent item id
+            if meta.get("members"):
+                if int(meta["members"][0]) not in item_ids:
+                    continue
+
+            cleaned_data.append(meta)

         payload = json.dumps(cleaned_data, indent=4)
@@ -167,7 +173,7 @@ class AfterEffectsServerStub():

     def get_active_document_full_name(self):
         """
-        Returns just a name of active document via ws call
+        Returns absolute path of active document via ws call
         Returns(string): file name
         """
         res = self.websocketserver.call(self.client.call(
@@ -314,15 +320,13 @@ class AfterEffectsServerStub():
         Keep matching item in file though.

         Args:
-            instance_id(string): instance uuid
+            instance_id(string): instance id
         """
         cleaned_data = []

         for instance in self.get_metadata():
-            uuid_val = instance.get("uuid")
-            if not uuid_val:
-                uuid_val = instance.get("members")[0]  # legacy
-            if uuid_val != instance_id:
+            inst_id = instance.get("instance_id") or instance.get("uuid")
+            if inst_id != instance_id:
                 cleaned_data.append(instance)

         payload = json.dumps(cleaned_data, indent=4)
@@ -357,7 +361,7 @@ class AfterEffectsServerStub():
             item_id (int):

         Returns:
-            (namedtuple)
+            (AEItem)

         """
         res = self.websocketserver.call(self.client.call
@@ -418,7 +422,7 @@ class AfterEffectsServerStub():
         """ Get render queue info for render purposes

         Returns:
-            (namedtuple): with 'file_name' field
+            (AEItem): with 'file_name' field
         """
         res = self.websocketserver.call(self.client.call
                 ('AfterEffects.get_render_info'))
@@ -606,7 +610,10 @@ class AfterEffectsServerStub():
                        d.get('workAreaStart'),
                        d.get('workAreaDuration'),
                        d.get('frameRate'),
-                       d.get('file_name'))
+                       d.get('file_name'),
+                       d.get("instance_id"),
+                       d.get("width"),
+                       d.get("height"))

             ret.append(item)
         return ret
@@ -1,7 +1,7 @@
-from openpype.hosts.aftereffects.plugins.create import create_render
+from openpype.hosts.aftereffects.plugins.create import create_legacy_render


-class CreateLocalRender(create_render.CreateRender):
+class CreateLocalRender(create_legacy_render.CreateRender):
     """ Creator to render locally.

     Created only after default render on farm. So family 'render.local' is
@@ -0,0 +1,62 @@
from openpype.pipeline import create
from openpype.pipeline import CreatorError
from openpype.hosts.aftereffects.api import (
    get_stub,
    list_instances
)


class CreateRender(create.LegacyCreator):
    """Render folder for publish.

    Creates subsets in the format 'familyTaskSubsetname',
    eg 'renderCompositingMain'.

    Creates only a single instance from a composition at a time.
    """

    name = "renderDefault"
    label = "Render on Farm"
    family = "render"
    defaults = ["Main"]

    def process(self):
        stub = get_stub()  # only after After Effects is up
        items = []
        if (self.options or {}).get("useSelection"):
            items = stub.get_selected_items(
                comps=True, folders=False, footages=False
            )
        if len(items) > 1:
            raise CreatorError(
                "Please select only a single composition at a time."
            )

        if not items:
            raise CreatorError((
                "Nothing to create. Select composition "
                "if 'useSelection' is on, or create at least "
                "one composition."
            ))

        existing_subsets = [
            instance['subset'].lower()
            for instance in list_instances()
        ]

        item = items.pop()
        if self.name.lower() in existing_subsets:
            txt = "Instance with name \"{}\" already exists.".format(self.name)
            raise CreatorError(txt)

        self.data["members"] = [item.id]
        self.data["uuid"] = item.id  # for SubsetManager
        self.data["subset"] = (
            self.data["subset"]
            .replace(stub.PUBLISH_ICON, '')
            .replace(stub.LOADED_ICON, '')
        )

        stub.imprint(item, self.data)
        stub.set_label_color(item.id, 14)  # Cyan options 0 - 16
        stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])
@@ -1,38 +1,67 @@
+from openpype import resources
+from openpype.lib import BoolDef, UISeparatorDef
+from openpype.hosts.aftereffects import api
 from openpype.pipeline import (
+    Creator,
+    CreatedInstance,
     CreatorError,
-    LegacyCreator
-)
-from openpype.hosts.aftereffects.api import (
-    get_stub,
-    list_instances
+    legacy_io,
 )


-class CreateRender(LegacyCreator):
-    """Render folder for publish.
-
-    Creates subsets in format 'familyTaskSubsetname',
-    eg 'renderCompositingMain'.
-
-    Create only single instance from composition at a time.
-    """
-
-    name = "renderDefault"
-    label = "Render on Farm"
+class RenderCreator(Creator):
+    identifier = "render"
+    label = "Render"
     family = "render"
-    defaults = ["Main"]
+    description = "Render creator"

-    def process(self):
-        stub = get_stub()  # only after After Effects is up
-        if (self.options or {}).get("useSelection"):
+    create_allow_context_change = True
+
+    def __init__(self, project_settings, *args, **kwargs):
+        super(RenderCreator, self).__init__(project_settings, *args, **kwargs)
+        self._default_variants = (project_settings["aftereffects"]
+                                  ["create"]
+                                  ["RenderCreator"]
+                                  ["defaults"])
+
+    def get_icon(self):
+        return resources.get_openpype_splash_filepath()
+
+    def collect_instances(self):
+        for instance_data in api.list_instances():
+            # legacy instances have family 'render' or 'renderLocal'; use them
+            creator_id = (instance_data.get("creator_identifier") or
+                          instance_data.get("family", '').replace("Local", ''))
+            if creator_id == self.identifier:
+                instance_data = self._handle_legacy(instance_data)
+                instance = CreatedInstance.from_existing(
+                    instance_data, self
+                )
+                self._add_instance_to_context(instance)
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+            api.get_stub().imprint(created_inst.get("instance_id"),
+                                   created_inst.data_to_store())
+
+    def remove_instances(self, instances):
+        for instance in instances:
+            api.remove_instance(instance)
+            self._remove_instance_from_context(instance)
+
+    def create(self, subset_name, data, pre_create_data):
+        stub = api.get_stub()  # only after After Effects is up
+        if pre_create_data.get("use_selection"):
             items = stub.get_selected_items(
                 comps=True, folders=False, footages=False
             )
+        else:
+            items = stub.get_items(comps=True, folders=False, footages=False)
+
         if len(items) > 1:
             raise CreatorError(
                 "Please select only a single composition at a time."
             )

         if not items:
             raise CreatorError((
                 "Nothing to create. Select composition "
@ -40,24 +69,54 @@ class CreateRender(LegacyCreator):
|
|||
"one composition."
|
||||
))
|
||||
|
||||
existing_subsets = [
|
||||
instance['subset'].lower()
|
||||
for instance in list_instances()
|
||||
for inst in self.create_context.instances:
|
||||
if subset_name == inst.subset_name:
|
||||
raise CreatorError("{} already exists".format(
|
||||
inst.subset_name))
|
||||
|
||||
data["members"] = [items[0].id]
|
||||
new_instance = CreatedInstance(self.family, subset_name, data, self)
|
||||
if "farm" in pre_create_data:
|
||||
use_farm = pre_create_data["farm"]
|
||||
new_instance.creator_attributes["farm"] = use_farm
|
||||
|
||||
api.get_stub().imprint(new_instance.id,
|
||||
new_instance.data_to_store())
|
||||
self._add_instance_to_context(new_instance)
|
||||
|
||||
def get_default_variants(self):
|
||||
return self._default_variants
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [BoolDef("farm", label="Render on farm")]
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
output = [
|
||||
BoolDef("use_selection", default=True, label="Use selection"),
|
||||
UISeparatorDef(),
|
||||
BoolDef("farm", label="Render on farm")
|
||||
]
|
||||
return output
|
||||
|
||||
item = items.pop()
|
||||
if self.name.lower() in existing_subsets:
|
||||
txt = "Instance with name \"{}\" already exists.".format(self.name)
|
||||
raise CreatorError(txt)
|
||||
def get_detail_description(self):
|
||||
return """Creator for Render instances"""
|
||||
|
||||
self.data["members"] = [item.id]
|
||||
self.data["uuid"] = item.id # for SubsetManager
|
||||
self.data["subset"] = (
|
||||
self.data["subset"]
|
||||
.replace(stub.PUBLISH_ICON, '')
|
||||
.replace(stub.LOADED_ICON, '')
|
||||
)
|
||||
def _handle_legacy(self, instance_data):
|
||||
"""Converts old instances to new format."""
|
||||
if not instance_data.get("members"):
|
||||
instance_data["members"] = [instance_data.get("uuid")]
|
||||
|
||||
stub.imprint(item, self.data)
|
||||
stub.set_label_color(item.id, 14) # Cyan options 0 - 16
|
||||
stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])
|
||||
if instance_data.get("uuid"):
|
||||
# uuid not needed, replaced with unique instance_id
|
||||
api.get_stub().remove_instance(instance_data.get("uuid"))
|
||||
instance_data.pop("uuid")
|
||||
|
||||
if not instance_data.get("task"):
|
||||
instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
|
||||
|
||||
if not instance_data.get("creator_attributes"):
|
||||
is_old_farm = instance_data["family"] != "renderLocal"
|
||||
instance_data["creator_attributes"] = {"farm": is_old_farm}
|
||||
instance_data["family"] = self.family
|
||||
|
||||
return instance_data
|
||||
|
|
|
|||
|
|
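For context, a minimal sketch of what `_handle_legacy` above does to an old-style instance payload (the values below are invented for illustration; only the field names and remapping rules come from the diff):

    # Hypothetical legacy instance as imprinted by the old LegacyCreator.
    legacy = {
        "family": "renderLocal",
        "subset": "renderCompositingMain",
        "uuid": "comp-101",  # old composition id
    }

    # Roughly what _handle_legacy hands back:
    converted = {
        "family": "render",                     # remapped to the new family
        "subset": "renderCompositingMain",
        "members": ["comp-101"],                # uuid moved into members
        "task": "compositing",                  # filled from AVALON_TASK
        "creator_attributes": {"farm": False},  # renderLocal -> farm=False
    }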
@@ -0,0 +1,80 @@
import openpype.hosts.aftereffects.api as api
from openpype.client import get_asset_by_name
from openpype.pipeline import (
    AutoCreator,
    CreatedInstance,
    legacy_io,
)


class AEWorkfileCreator(AutoCreator):
    identifier = "workfile"
    family = "workfile"

    default_variant = "Main"

    def get_instance_attr_defs(self):
        return []

    def collect_instances(self):
        for instance_data in api.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                subset_name = instance_data["subset"]
                instance = CreatedInstance(
                    self.family, subset_name, instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        # nothing to change on workfiles
        pass

    def create(self, options=None):
        existing_instance = None
        for instance in self.create_context.instances:
            if instance.family == self.family:
                existing_instance = instance
                break

        project_name = legacy_io.Session["AVALON_PROJECT"]
        asset_name = legacy_io.Session["AVALON_ASSET"]
        task_name = legacy_io.Session["AVALON_TASK"]
        host_name = legacy_io.Session["AVALON_APP"]

        if existing_instance is None:
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            )
            data = {
                "asset": asset_name,
                "task": task_name,
                "variant": self.default_variant
            }
            data.update(self.get_dynamic_data(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            ))

            new_instance = CreatedInstance(
                self.family, subset_name, data, self
            )
            self._add_instance_to_context(new_instance)

            api.get_stub().imprint(new_instance.get("instance_id"),
                                   new_instance.data_to_store())

        elif (
            existing_instance["asset"] != asset_name
            or existing_instance["task"] != task_name
        ):
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            )
            existing_instance["asset"] = asset_name
            existing_instance["task"] = task_name
            existing_instance["subset"] = subset_name
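As a rough illustration of what `get_subset_name` resolves to for the workfile instance above (the name template is configurable per project; "{family}{Task}" is just the common default, so treat this as an assumption):

    family, task = "workfile", "compositing"
    # With the default template the capitalized task name is appended:
    subset_name = family + task.capitalize()
    print(subset_name)  # workfileCompositing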
@@ -1,14 +1,14 @@
import re

from openpype.lib import (
    get_background_layers,
    get_unique_layer_name
)
from openpype.pipeline import get_representation_path
from openpype.hosts.aftereffects.api import (
    AfterEffectsLoader,
    containerise
)
from openpype.hosts.aftereffects.api.lib import (
    get_background_layers,
    get_unique_layer_name,
)


class BackgroundLoader(AfterEffectsLoader):

@@ -90,7 +90,7 @@ class BackgroundLoader(AfterEffectsLoader):
        container["namespace"] = comp_name
        container["members"] = comp.members

        stub.imprint(comp, container)
        stub.imprint(comp.id, container)

    def remove(self, container):
        """
@@ -99,10 +99,9 @@ class BackgroundLoader(AfterEffectsLoader):
        Args:
            container (dict): container to be removed - used to get layer_id
        """
        print("!!!! container:: {}".format(container))
        stub = self.get_stub()
        layer = container.pop("layer")
        stub.imprint(layer, {})
        stub.imprint(layer.id, {})
        stub.delete_item(layer.id)

    def switch(self, container, representation):
@@ -1,12 +1,11 @@
import re

from openpype import lib

from openpype.pipeline import get_representation_path
from openpype.hosts.aftereffects.api import (
    AfterEffectsLoader,
    containerise
)
from openpype.hosts.aftereffects.api.lib import get_unique_layer_name


class FileLoader(AfterEffectsLoader):

@@ -28,7 +27,7 @@ class FileLoader(AfterEffectsLoader):
        stub = self.get_stub()
        layers = stub.get_items(comps=True, folders=True, footages=True)
        existing_layers = [layer.name for layer in layers]
        comp_name = lib.get_unique_layer_name(
        comp_name = get_unique_layer_name(
            existing_layers, "{}_{}".format(context["asset"]["name"], name))

        import_options = {}

@@ -87,7 +86,7 @@ class FileLoader(AfterEffectsLoader):
        if namespace_from_container != layer_name:
            layers = stub.get_items(comps=True)
            existing_layers = [layer.name for layer in layers]
            layer_name = lib.get_unique_layer_name(
            layer_name = get_unique_layer_name(
                existing_layers,
                "{}_{}".format(context["asset"], context["subset"]))
        else:  # switching version - keep same name

@@ -96,9 +95,9 @@ class FileLoader(AfterEffectsLoader):
        # with aftereffects.maintained_selection():  # TODO
        stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name)
        stub.imprint(
            layer, {"representation": str(representation["_id"]),
                    "name": context["subset"],
                    "namespace": layer_name}
            layer.id, {"representation": str(representation["_id"]),
                       "name": context["subset"],
                       "namespace": layer_name}
        )

    def remove(self, container):

@@ -109,7 +108,7 @@ class FileLoader(AfterEffectsLoader):
        """
        stub = self.get_stub()
        layer = container.pop("layer")
        stub.imprint(layer, {})
        stub.imprint(layer.id, {})
        stub.delete_item(layer.id)

    def switch(self, container, representation):
@@ -17,12 +17,11 @@ class CollectAudio(pyblish.api.ContextPlugin):

    def process(self, context):
        for instance in context:
            if instance.data["family"] == 'render.farm':
            if 'render.farm' in instance.data.get("families", []):
                comp_id = instance.data["comp_id"]
                if not comp_id:
                    self.log.debug("No comp_id filled in instance")
                    # @iLLiCiTiT QUESTION Should return or continue?
                    return
                    continue
                context.data["audioFile"] = os.path.normpath(
                    get_stub().get_audio_url(comp_id)
                ).replace("\\", "/")
@@ -6,8 +6,8 @@ import attr
import pyblish.api

from openpype.settings import get_project_settings
from openpype.lib import abstract_collect_render
from openpype.lib.abstract_collect_render import RenderInstance
from openpype.pipeline import publish
from openpype.pipeline.publish import RenderInstance

from openpype.hosts.aftereffects.api import get_stub

@@ -21,135 +21,130 @@ class AERenderInstance(RenderInstance):
    projectEntity = attr.ib(default=None)
    stagingDir = attr.ib(default=None)
    app_version = attr.ib(default=None)
    publish_attributes = attr.ib(default={})
    file_name = attr.ib(default=None)


class CollectAERender(abstract_collect_render.AbstractCollectRender):
class CollectAERender(publish.AbstractCollectRender):

    order = pyblish.api.CollectorOrder + 0.498
    order = pyblish.api.CollectorOrder + 0.405
    label = "Collect After Effects Render Layers"
    hosts = ["aftereffects"]

    # internal
    family_remapping = {
        "render": ("render.farm", "farm"),  # (family, label)
        "renderLocal": ("render", "local")
    }
    padding_width = 6
    rendered_extension = 'png'

    stub = get_stub()
    _stub = None

    @classmethod
    def get_stub(cls):
        if not cls._stub:
            cls._stub = get_stub()
        return cls._stub

    def get_instances(self, context):
        instances = []
        instances_to_remove = []

        app_version = self.stub.get_app_version()
        app_version = CollectAERender.get_stub().get_app_version()
        app_version = app_version[0:4]

        current_file = context.data["currentFile"]
        version = context.data["version"]
        asset_entity = context.data["assetEntity"]

        project_entity = context.data["projectEntity"]

        compositions = self.stub.get_items(True)
        compositions = CollectAERender.get_stub().get_items(True)
        compositions_by_id = {item.id: item for item in compositions}
        for inst in self.stub.get_metadata():
            schema = inst.get('schema')
            # loaded asset container skip it
            if schema and 'container' in schema:
        for inst in context:
            if not inst.data.get("active", True):
                continue

            if not inst["members"]:
                raise ValueError("Couldn't find id, unable to publish. " +
                                 "Please recreate instance.")
            item_id = inst["members"][0]
            family = inst.data["family"]
            if family not in ["render", "renderLocal"]:  # legacy
                continue

            work_area_info = self.stub.get_work_area(int(item_id))
            item_id = inst.data["members"][0]

            work_area_info = CollectAERender.get_stub().get_work_area(
                int(item_id))

            if not work_area_info:
                self.log.warning("Orphaned instance, deleting metadata")
                self.stub.remove_instance(int(item_id))
                inst_id = inst.get("instance_id") or item_id
                CollectAERender.get_stub().remove_instance(inst_id)
                continue

            frameStart = work_area_info.workAreaStart

            frameEnd = round(work_area_info.workAreaStart +
                             float(work_area_info.workAreaDuration) *
                             float(work_area_info.frameRate)) - 1
            frame_start = work_area_info.workAreaStart
            frame_end = round(work_area_info.workAreaStart +
                              float(work_area_info.workAreaDuration) *
                              float(work_area_info.frameRate)) - 1
            fps = work_area_info.frameRate
            # TODO add resolution when supported by extension

            if inst["family"] in self.family_remapping.keys() \
                    and inst["active"]:
                remapped_family = self.family_remapping[inst["family"]]
                instance = AERenderInstance(
                    family=remapped_family[0],
                    families=[remapped_family[0]],
                    version=version,
                    time="",
                    source=current_file,
                    label="{} - {}".format(inst["subset"], remapped_family[1]),
                    subset=inst["subset"],
                    asset=context.data["assetEntity"]["name"],
                    attachTo=False,
                    setMembers='',
                    publish=True,
                    renderer='aerender',
                    name=inst["subset"],
                    resolutionWidth=asset_entity["data"].get(
                        "resolutionWidth",
                        project_entity["data"]["resolutionWidth"]),
                    resolutionHeight=asset_entity["data"].get(
                        "resolutionHeight",
                        project_entity["data"]["resolutionHeight"]),
                    pixelAspect=1,
                    tileRendering=False,
                    tilesX=0,
                    tilesY=0,
                    frameStart=frameStart,
                    frameEnd=frameEnd,
                    frameStep=1,
                    toBeRenderedOn='deadline',
                    fps=fps,
                    app_version=app_version
                )
            task_name = inst.data.get("task")  # legacy

                comp = compositions_by_id.get(int(item_id))
                if not comp:
                    raise ValueError("There is no composition for item {}".
                                     format(item_id))
                instance.comp_name = comp.name
                instance.comp_id = item_id
                instance._anatomy = context.data["anatomy"]
                instance.anatomyData = context.data["anatomyData"]
            render_q = CollectAERender.get_stub().get_render_info()
            if not render_q:
                raise ValueError("No file extension set in Render Queue")

                instance.outputDir = self._get_output_dir(instance)
                instance.context = context
            subset_name = inst.data["subset"]
            instance = AERenderInstance(
                family="render",
                families=inst.data.get("families", []),
                version=version,
                time="",
                source=current_file,
                label="{} - {}".format(subset_name, family),
                subset=subset_name,
                asset=inst.data["asset"],
                task=task_name,
                attachTo=False,
                setMembers='',
                publish=True,
                name=subset_name,
                resolutionWidth=render_q.width,
                resolutionHeight=render_q.height,
                pixelAspect=1,
                tileRendering=False,
                tilesX=0,
                tilesY=0,
                frameStart=frame_start,
                frameEnd=frame_end,
                frameStep=1,
                fps=fps,
                app_version=app_version,
                publish_attributes=inst.data.get("publish_attributes", {}),
                file_name=render_q.file_name
            )

            settings = get_project_settings(os.getenv("AVALON_PROJECT"))
            reviewable_subset_filter = \
                (settings["deadline"]
                         ["publish"]
                         ["ProcessSubmittedJobOnFarm"]
                         ["aov_filter"])
            comp = compositions_by_id.get(int(item_id))
            if not comp:
                raise ValueError("There is no composition for item {}".
                                 format(item_id))
            instance.outputDir = self._get_output_dir(instance)
            instance.comp_name = comp.name
            instance.comp_id = item_id

            if inst["family"] == "renderLocal":
                # for local renders
                instance.anatomyData["version"] = instance.version
                instance.anatomyData["subset"] = instance.subset
                instance.stagingDir = tempfile.mkdtemp()
                instance.projectEntity = project_entity
            is_local = "renderLocal" in inst.data["family"]  # legacy
            if inst.data.get("creator_attributes"):
                is_local = not inst.data["creator_attributes"].get("farm")
            if is_local:
                # for local renders
                instance = self._update_for_local(instance, project_entity)
            else:
                fam = "render.farm"
                if fam not in instance.families:
                    instance.families.append(fam)
                instance.toBeRenderedOn = "deadline"
                instance.renderer = "aerender"
                instance.farm = True  # to skip integrate

            if self.hosts[0] in reviewable_subset_filter.keys():
                for aov_pattern in \
                        reviewable_subset_filter[self.hosts[0]]:
                    if re.match(aov_pattern, instance.subset):
                        instance.families.append("review")
                        instance.review = True
                        break

            self.log.info("New instance:: {}".format(instance))
            instances.append(instance)
            instances.append(instance)
            instances_to_remove.append(inst)

        for instance in instances_to_remove:
            context.remove(instance)
        return instances

    def get_expected_files(self, render_instance):
@@ -168,15 +163,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
        start = render_instance.frameStart
        end = render_instance.frameEnd

        # pull file name from Render Queue Output module
        render_q = self.stub.get_render_info()
        if not render_q:
            raise ValueError("No file extension set in Render Queue")
        _, ext = os.path.splitext(os.path.basename(render_q.file_name))
        _, ext = os.path.splitext(os.path.basename(render_instance.file_name))

        base_dir = self._get_output_dir(render_instance)
        expected_files = []
        if "#" not in render_q.file_name:  # single frame (mov)
        if "#" not in render_instance.file_name:  # single frame (mov)
            path = os.path.join(base_dir, "{}_{}_{}.{}".format(
                render_instance.asset,
                render_instance.subset,
@@ -216,3 +207,24 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):

        # for submit_publish_job
        return base_dir

    def _update_for_local(self, instance, project_entity):
        """Update old saved instances to current publishing format"""
        instance.stagingDir = tempfile.mkdtemp()
        instance.projectEntity = project_entity
        fam = "render.local"
        if fam not in instance.families:
            instance.families.append(fam)

        settings = get_project_settings(os.getenv("AVALON_PROJECT"))
        reviewable_subset_filter = (settings["deadline"]
                                    ["publish"]
                                    ["ProcessSubmittedJobOnFarm"]
                                    ["aov_filter"].get(self.hosts[0]))
        for aov_pattern in reviewable_subset_filter:
            if re.match(aov_pattern, instance.subset):
                instance.families.append("review")
                instance.review = True
                break

        return instance
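As a sanity check of the work-area frame-range arithmetic above (the numbers are invented for illustration):

    work_area_start = 10      # first frame of the work area
    work_area_duration = 2.0  # seconds
    frame_rate = 25.0

    frame_start = work_area_start
    frame_end = round(work_area_start
                      + work_area_duration * frame_rate) - 1
    print(frame_start, frame_end)  # 10 59 -> 50 frames, inclusive range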
@@ -1,7 +1,8 @@
import os
from avalon import api

import pyblish.api
from openpype.lib import get_subset_name_with_asset_doc
from openpype.pipeline import legacy_io
from openpype.pipeline.create import get_subset_name


class CollectWorkfile(pyblish.api.ContextPlugin):

@@ -10,17 +11,48 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
    label = "Collect After Effects Workfile Instance"
    order = pyblish.api.CollectorOrder + 0.1

    default_variant = "Main"

    def process(self, context):
        task = api.Session["AVALON_TASK"]
        existing_instance = None
        for instance in context:
            if instance.data["family"] == "workfile":
                self.log.debug("Workfile instance found, won't create new")
                existing_instance = instance
                break

        current_file = context.data["currentFile"]
        staging_dir = os.path.dirname(current_file)
        scene_file = os.path.basename(current_file)
        if existing_instance is None:  # old publish
            instance = self._get_new_instance(context, scene_file)
        else:
            instance = existing_instance

        # creating representation
        representation = {
            'name': 'aep',
            'ext': 'aep',
            'files': scene_file,
            "stagingDir": staging_dir,
        }

        if not instance.data.get("representations"):
            instance.data["representations"] = []
        instance.data["representations"].append(representation)

        instance.data["publish"] = instance.data["active"]  # for DL

    def _get_new_instance(self, context, scene_file):
        task = legacy_io.Session["AVALON_TASK"]
        version = context.data["version"]
        asset_entity = context.data["assetEntity"]
        project_entity = context.data["projectEntity"]

        shared_instance_data = {
        instance_data = {
            "active": True,
            "asset": asset_entity["name"],
            "task": task,
            "frameStart": asset_entity["data"]["frameStart"],
            "frameEnd": asset_entity["data"]["frameEnd"],
            "handleStart": asset_entity["data"]["handleStart"],
@@ -39,13 +71,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin):

        # workfile instance
        family = "workfile"
        subset = get_subset_name_with_asset_doc(
        subset = get_subset_name(
            family,
            "",
            self.default_variant,
            context.data["anatomyData"]["task"]["name"],
            context.data["assetEntity"],
            context.data["anatomyData"]["project"]["name"],
            host_name=context.data["hostName"]
            host_name=context.data["hostName"],
            project_settings=context.data["project_settings"]
        )
        # Create instance
        instance = context.create_instance(subset)
@@ -59,20 +92,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
            "representations": list()
        })

        # adding basic script data
        instance.data.update(shared_instance_data)
        instance.data.update(instance_data)

        # creating representation
        representation = {
            'name': 'aep',
            'ext': 'aep',
            'files': scene_file,
            "stagingDir": staging_dir,
        }

        instance.data["representations"].append(representation)

        self.log.info('Publishing After Effects workfile')

        for i in context:
            self.log.debug(f"{i.data['families']}")
        return instance
@@ -12,7 +12,7 @@ class ExtractLocalRender(openpype.api.Extractor):
    order = openpype.api.Extractor.order - 0.47
    label = "Extract Local Render"
    hosts = ["aftereffects"]
    families = ["render"]
    families = ["renderLocal", "render.local"]

    def process(self, instance):
        stub = get_stub()
@@ -1,15 +1,16 @@
import pyblish.api

import openpype.api
from openpype.hosts.aftereffects.api import get_stub


class ExtractSaveScene(openpype.api.Extractor):
class ExtractSaveScene(pyblish.api.ContextPlugin):
    """Save scene before extraction."""

    order = openpype.api.Extractor.order - 0.48
    label = "Extract Save Scene"
    hosts = ["aftereffects"]
    families = ["workfile"]

    def process(self, instance):
    def process(self, context):
        stub = get_stub()
        stub.save()
@@ -12,6 +12,8 @@ One of the settings in a scene doesn't match to asset settings in database.
### How to repair?

Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.

In the scene, right-click the published composition > `Composition Settings`.
</description>
<detail>
### __Detailed Info__ (optional)
@@ -0,0 +1,54 @@
import json
import pyblish.api
from openpype.hosts.aftereffects.api import list_instances


class PreCollectRender(pyblish.api.ContextPlugin):
    """
    Checks if render instance is of old type; adds families so both
    existing collectors work the same way.

    Could be removed in the future when no one uses old publish.
    """

    label = "PreCollect Render"
    order = pyblish.api.CollectorOrder + 0.400
    hosts = ["aftereffects"]

    family_remapping = {
        "render": ("render.farm", "farm"),  # (family, label)
        "renderLocal": ("render.local", "local")
    }

    def process(self, context):
        if context.data.get("newPublishing"):
            self.log.debug("Not applicable for New Publisher, skip")
            return

        for inst in list_instances():
            if inst.get("creator_attributes"):
                raise ValueError("Instance created in New publisher, "
                                 "cannot be published in Pyblish.\n"
                                 "Please publish in New Publisher "
                                 "or recreate instances with legacy Creators")

            if inst["family"] not in self.family_remapping.keys():
                continue

            if not inst["members"]:
                raise ValueError("Couldn't find id, unable to publish. " +
                                 "Please recreate instance.")

            instance = context.create_instance(inst["subset"])
            inst["families"] = [self.family_remapping[inst["family"]][0]]
            instance.data.update(inst)

            self._debug_log(instance)

    def _debug_log(self, instance):
        def _default_json(value):
            return str(value)

        self.log.info(
            json.dumps(instance.data, indent=4, default=_default_json)
        )
@@ -1,7 +1,10 @@
from avalon import api
import pyblish.api

import openpype.api
from openpype.pipeline import PublishXmlValidationError
from openpype.pipeline import (
    PublishXmlValidationError,
    legacy_io,
)
from openpype.hosts.aftereffects.api import get_stub


@@ -27,8 +30,8 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
        for instance in instances:
            data = stub.read(instance[0])

            data["asset"] = api.Session["AVALON_ASSET"]
            stub.imprint(instance[0], data)
            data["asset"] = legacy_io.Session["AVALON_ASSET"]
            stub.imprint(instance[0].instance_id, data)


class ValidateInstanceAsset(pyblish.api.InstancePlugin):

@@ -51,7 +54,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):

    def process(self, instance):
        instance_asset = instance.data["asset"]
        current_asset = api.Session["AVALON_ASSET"]
        current_asset = legacy_io.Session["AVALON_ASSET"]
        msg = (
            f"Instance asset {instance_asset} is not the same "
            f"as current context {current_asset}."
@@ -1,15 +1,23 @@
# -*- coding: utf-8 -*-
"""Validate scene settings."""
"""Validate scene settings.

Requires:
    instance -> assetEntity
    instance -> anatomyData
"""
import os
import re

import pyblish.api

from openpype.pipeline import PublishXmlValidationError
from openpype.pipeline import (
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)
from openpype.hosts.aftereffects.api import get_asset_settings


class ValidateSceneSettings(pyblish.api.InstancePlugin):
class ValidateSceneSettings(OptionalPyblishPluginMixin,
                            pyblish.api.InstancePlugin):
    """
    Ensures that Composition Settings (right mouse on comp) are the same as
    on the task in FTrack.

@@ -50,7 +58,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):

    order = pyblish.api.ValidatorOrder
    label = "Validate Scene Settings"
    families = ["render.farm", "render"]
    families = ["render.farm", "render.local", "render"]
    hosts = ["aftereffects"]
    optional = True

@@ -59,15 +67,21 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):

    def process(self, instance):
        """Plugin entry point."""
        expected_settings = get_asset_settings()
        # Skip the instance if is not active by data on the instance
        if not self.is_active(instance.data):
            return

        asset_doc = instance.data["assetEntity"]
        expected_settings = get_asset_settings(asset_doc)
        self.log.info("config from DB::{}".format(expected_settings))

        if any(re.search(pattern, os.getenv('AVALON_TASK'))
        task_name = instance.data["anatomyData"]["task"]["name"]
        if any(re.search(pattern, task_name)
               for pattern in self.skip_resolution_check):
            expected_settings.pop("resolutionWidth")
            expected_settings.pop("resolutionHeight")

        if any(re.search(pattern, os.getenv('AVALON_TASK'))
        if any(re.search(pattern, task_name)
               for pattern in self.skip_timelines_check):
            expected_settings.pop('fps', None)
            expected_settings.pop('frameStart', None)
@@ -87,10 +101,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
        duration = instance.data.get("frameEndHandle") - \
            instance.data.get("frameStartHandle") + 1

        self.log.debug("filtered config::{}".format(expected_settings))
        self.log.debug("validated items::{}".format(expected_settings))

        current_settings = {
            "fps": fps,
            "frameStart": instance.data.get("frameStart"),
            "frameEnd": instance.data.get("frameEnd"),
            "handleStart": instance.data.get("handleStart"),
            "handleEnd": instance.data.get("handleEnd"),
            "frameStartHandle": instance.data.get("frameStartHandle"),
            "frameEndHandle": instance.data.get("frameEndHandle"),
            "resolutionWidth": instance.data.get("resolutionWidth"),
@@ -103,24 +121,22 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
        invalid_keys = set()
        for key, value in expected_settings.items():
            if value != current_settings[key]:
                invalid_settings.append(
                    "{} expected: {} found: {}".format(key, value,
                                                       current_settings[key])
                )
                msg = "'{}' expected: '{}' found: '{}'".format(
                    key, value, current_settings[key])

                if key == "duration" and expected_settings.get("handleStart"):
                    msg += "Handles included in calculation. Remove " \
                           "handles in DB or extend frame range in " \
                           "Composition Setting."

                invalid_settings.append(msg)
                invalid_keys.add(key)

        if ((expected_settings.get("handleStart")
                or expected_settings.get("handleEnd"))
                and invalid_settings):
            msg = "Handles included in calculation. Remove handles in DB " +\
                  "or extend frame range in Composition Setting."
            invalid_settings[-1]["reason"] = msg

        msg = "Found invalid settings:\n{}".format(
            "\n".join(invalid_settings)
        )

        if invalid_settings:
            msg = "Found invalid settings:\n{}".format(
                "\n".join(invalid_settings)
            )

            invalid_keys_str = ",".join(invalid_keys)
            break_str = "<br/>"
            invalid_setting_str = "<b>Found invalid settings:</b><br/>{}".\
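To make the expected-vs-current comparison above concrete, a standalone sketch (the settings values are made up):

    expected_settings = {"fps": 25, "frameStart": 1001, "resolutionWidth": 1920}
    current_settings = {"fps": 24, "frameStart": 1001, "resolutionWidth": 1920}

    invalid_settings = []
    invalid_keys = set()
    for key, value in expected_settings.items():
        if value != current_settings[key]:
            invalid_settings.append(
                "'{}' expected: '{}' found: '{}'".format(
                    key, value, current_settings[key]))
            invalid_keys.add(key)

    print(invalid_settings)  # ["'fps' expected: '25' found: '24'"]
    print(invalid_keys)      # {'fps'}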
@@ -1,52 +1,6 @@
import os
from .addon import BlenderAddon


def add_implementation_envs(env, _app):
    """Modify environments to contain all required for implementation."""
    # Prepare path to implementation script
    implementation_user_script_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "blender_addon"
    )

    # Add blender implementation script path to PYTHONPATH
    python_path = env.get("PYTHONPATH") or ""
    python_path_parts = [
        path
        for path in python_path.split(os.pathsep)
        if path
    ]
    python_path_parts.insert(0, implementation_user_script_path)
    env["PYTHONPATH"] = os.pathsep.join(python_path_parts)

    # Modify Blender user scripts path
    previous_user_scripts = set()
    # Implementation path is added to set for easier paths check inside loops
    # - will be removed at the end
    previous_user_scripts.add(implementation_user_script_path)

    openpype_blender_user_scripts = (
        env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
    )
    for path in openpype_blender_user_scripts.split(os.pathsep):
        if path:
            previous_user_scripts.add(os.path.normpath(path))

    blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
    for path in blender_user_scripts.split(os.pathsep):
        if path:
            previous_user_scripts.add(os.path.normpath(path))

    # Remove implementation path from user script paths as is set to
    # `BLENDER_USER_SCRIPTS`
    previous_user_scripts.remove(implementation_user_script_path)
    env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path

    # Set custom user scripts env
    env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
        previous_user_scripts
    )

    # Define Qt binding if not defined
    if not env.get("QT_PREFERRED_BINDING"):
        env["QT_PREFERRED_BINDING"] = "PySide2"
__all__ = (
    "BlenderAddon",
)
73
openpype/hosts/blender/addon.py
Normal file

@@ -0,0 +1,73 @@
import os
from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostAddon

BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


class BlenderAddon(OpenPypeModule, IHostAddon):
    name = "blender"
    host_name = "blender"

    def initialize(self, module_settings):
        self.enabled = True

    def add_implementation_envs(self, env, _app):
        """Modify environments to contain all required for implementation."""
        # Prepare path to implementation script
        implementation_user_script_path = os.path.join(
            BLENDER_ROOT_DIR,
            "blender_addon"
        )

        # Add blender implementation script path to PYTHONPATH
        python_path = env.get("PYTHONPATH") or ""
        python_path_parts = [
            path
            for path in python_path.split(os.pathsep)
            if path
        ]
        python_path_parts.insert(0, implementation_user_script_path)
        env["PYTHONPATH"] = os.pathsep.join(python_path_parts)

        # Modify Blender user scripts path
        previous_user_scripts = set()
        # Implementation path is added to set for easier paths check inside
        # loops - will be removed at the end
        previous_user_scripts.add(implementation_user_script_path)

        openpype_blender_user_scripts = (
            env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or ""
        )
        for path in openpype_blender_user_scripts.split(os.pathsep):
            if path:
                previous_user_scripts.add(os.path.normpath(path))

        blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or ""
        for path in blender_user_scripts.split(os.pathsep):
            if path:
                previous_user_scripts.add(os.path.normpath(path))

        # Remove implementation path from user script paths as is set to
        # `BLENDER_USER_SCRIPTS`
        previous_user_scripts.remove(implementation_user_script_path)
        env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path

        # Set custom user scripts env
        env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join(
            previous_user_scripts
        )

        # Define Qt binding if not defined
        if not env.get("QT_PREFERRED_BINDING"):
            env["QT_PREFERRED_BINDING"] = "PySide2"

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(BLENDER_ROOT_DIR, "hooks")
        ]

    def get_workfile_extensions(self):
        return [".blend"]
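A quick illustration of the user-scripts environment shuffle performed by `add_implementation_envs` above (the paths are made up for the example):

    env = {
        "BLENDER_USER_SCRIPTS": "/studio/blender_scripts",
    }
    # implementation_user_script_path == ".../hosts/blender/blender_addon"
    #
    # After the method runs:
    #   env["BLENDER_USER_SCRIPTS"]
    #       -> ".../hosts/blender/blender_addon" (the addon owns the variable)
    #   env["OPENPYPE_BLENDER_USER_SCRIPTS"]
    #       -> "/studio/blender_scripts" (previous paths preserved aside)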
@@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List:
def read(node: bpy.types.bpy_struct_meta_idprop):
    """Return user-defined attributes from `node`"""

    data = dict(node.get(pipeline.AVALON_PROPERTY))
    data = dict(node.get(pipeline.AVALON_PROPERTY, {}))

    # Ignore hidden/internal data
    data = {
@@ -15,9 +15,9 @@ from Qt import QtWidgets, QtCore
import bpy
import bpy.utils.previews

import avalon.api
from openpype.tools.utils import host_tools
from openpype import style
from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools

from .workio import OpenFileCacher

@@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict()
# This seems like a good value to keep the Qt app responsive and doesn't slow
# down Blender. At least on macOS the interface of Blender gets very laggy if
# you make it smaller.
TIMER_INTERVAL: float = 0.01
TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1


class BlenderApplication(QtWidgets.QApplication):

@@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]:
            dialog.setDetailedText(detail)
            dialog.exec_()

        # Refresh Manager
        if GlobalClass.app:
            manager = GlobalClass.app.get_window("WM_OT_avalon_manager")
            if manager:
                manager.refresh()

    if not GlobalClass.is_windows:
        if OpenFileCacher.opening_file:
            return TIMER_INTERVAL

@@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator):
        self._app = BlenderApplication.get_app()
        GlobalClass.app = self._app

        bpy.app.timers.register(
            _process_app_events,
            persistent=True
        )
        if not bpy.app.timers.is_registered(_process_app_events):
            bpy.app.timers.register(
                _process_app_events,
                persistent=True
            )

    def execute(self, context):
        """Execute the operator.

@@ -220,12 +227,9 @@ class LaunchQtApp(bpy.types.Operator):
        self._app.store_window(self.bl_idname, window)
        self._window = window

        if not isinstance(
            self._window,
            (QtWidgets.QMainWindow, QtWidgets.QDialog, ModuleType)
        ):
        if not isinstance(self._window, (QtWidgets.QWidget, ModuleType)):
            raise AttributeError(
                "`window` should be a `QDialog or module`. Got: {}".format(
                "`window` should be a `QWidget or module`. Got: {}".format(
                    str(type(window))
                )
            )

@@ -249,9 +253,9 @@ class LaunchQtApp(bpy.types.Operator):
        self._window.setWindowFlags(on_top_flags)
        self._window.show()

        if on_top_flags != origin_flags:
            self._window.setWindowFlags(origin_flags)
            self._window.show()
        # if on_top_flags != origin_flags:
        #     self._window.setWindowFlags(origin_flags)
        #     self._window.show()

        return {'FINISHED'}

@@ -279,7 +283,7 @@ class LaunchLoader(LaunchQtApp):

    def before_window_show(self):
        self._window.set_context(
            {"asset": avalon.api.Session["AVALON_ASSET"]},
            {"asset": legacy_io.Session["AVALON_ASSET"]},
            refresh=True
        )

@@ -327,8 +331,8 @@ class LaunchWorkFiles(LaunchQtApp):
    def execute(self, context):
        result = super().execute(context)
        self._window.set_context({
            "asset": avalon.api.Session["AVALON_ASSET"],
            "task": avalon.api.Session["AVALON_TASK"]
            "asset": legacy_io.Session["AVALON_ASSET"],
            "task": legacy_io.Session["AVALON_TASK"]
        })
        return result

@@ -358,8 +362,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
        else:
            pyblish_menu_icon_id = 0

        asset = avalon.api.Session['AVALON_ASSET']
        task = avalon.api.Session['AVALON_TASK']
        asset = legacy_io.Session['AVALON_ASSET']
        task = legacy_io.Session['AVALON_TASK']
        context_label = f"{asset}, {task}"
        context_label_item = layout.row()
        context_label_item.operator(
@@ -1,6 +1,5 @@
import os
import sys
import importlib
import traceback
from typing import Callable, Dict, Iterator, List, Optional

@@ -10,10 +9,11 @@ from . import lib
from . import ops

import pyblish.api
import avalon.api
from avalon import io, schema

from openpype.client import get_asset_by_name
from openpype.pipeline import (
    schema,
    legacy_io,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,

@@ -84,18 +84,16 @@ def uninstall():


def set_start_end_frames():
    asset_name = io.Session["AVALON_ASSET"]
    asset_doc = io.find_one({
        "type": "asset",
        "name": asset_name
    })
    project_name = legacy_io.active_project()
    asset_name = legacy_io.Session["AVALON_ASSET"]
    asset_doc = get_asset_by_name(project_name, asset_name)

    scene = bpy.context.scene

    # Default scene settings
    frameStart = scene.frame_start
    frameEnd = scene.frame_end
    fps = scene.render.fps
    fps = scene.render.fps / scene.render.fps_base
    resolution_x = scene.render.resolution_x
    resolution_y = scene.render.resolution_y

@@ -118,7 +116,8 @@ def set_start_end_frames():

    scene.frame_start = frameStart
    scene.frame_end = frameEnd
    scene.render.fps = fps
    scene.render.fps = round(fps)
    scene.render.fps_base = round(fps) / fps
    scene.render.resolution_x = resolution_x
    scene.render.resolution_y = resolution_y
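The fps/fps_base pair above is how Blender expresses fractional frame rates; a worked example (the rate is illustrative):

    fps = 23.976  # e.g. read from the asset document

    fps_int = round(fps)      # 24     -> scene.render.fps
    fps_base = fps_int / fps  # ~1.001 -> scene.render.fps_base

    effective = fps_int / fps_base  # what Blender actually plays back
    print(round(effective, 3))      # 23.976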
@@ -189,7 +188,7 @@ def _on_task_changed():
    # `directory` attribute, so it opens in that directory (does it?).
    # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector
    # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
    workdir = avalon.api.Session["AVALON_WORKDIR"]
    workdir = legacy_io.Session["AVALON_WORKDIR"]
    log.debug("New working directory: %s", workdir)


@@ -200,27 +199,6 @@ def _register_events():
    log.info("Installed event callback for 'taskChanged'...")


def reload_pipeline(*args):
    """Attempt to reload pipeline at run-time.

    Warning:
        This is primarily for development and debugging purposes and not well
        tested.

    """

    avalon.api.uninstall()

    for module in (
        "avalon.io",
        "avalon.lib",
        "avalon.pipeline",
        "avalon.api",
    ):
        module = importlib.import_module(module)
        importlib.reload(module)


def _discover_gui() -> Optional[Callable]:
    """Return the most desirable of the currently registered GUIs"""
@@ -266,7 +266,7 @@ class AssetLoader(LoaderPlugin):
        # Only containerise if it's not already a collection from a .blend file.
        # representation = context["representation"]["name"]
        # if representation != "blend":
        #     from avalon.blender.pipeline import containerise
        #     from openpype.hosts.blender.api.pipeline import containerise
        #     return containerise(
        #         name=name,
        #         namespace=namespace,

@@ -5,8 +5,6 @@ from typing import List, Optional

import bpy

from openpype.pipeline import HOST_WORKFILE_EXTENSIONS


class OpenFileCacher:
    """Store information about opening file.

@@ -78,7 +76,7 @@ def has_unsaved_changes() -> bool:
def file_extensions() -> List[str]:
    """Return the supported file extensions for Blender scene files."""

    return HOST_WORKFILE_EXTENSIONS["blender"]
    return [".blend"]


def work_root(session: dict) -> str:

@@ -1,4 +1,10 @@
from avalon import pipeline
from openpype.pipeline import install_host
from openpype.hosts.blender import api

pipeline.install(api)

def register():
    install_host(api)


def unregister():
    pass
@@ -1,6 +1,7 @@
import os
import re
import subprocess
from platform import system
from openpype.lib import PreLaunchHook


@@ -13,12 +14,9 @@ class InstallPySideToBlender(PreLaunchHook):

    Pipeline implementation requires a Qt binding installed in
    blender's python packages.

    Prelaunch hook can work only on Windows right now.
    """

    app_groups = ["blender"]
    platforms = ["windows"]

    def execute(self):
        # Prelaunch hook is not crucial

@@ -34,25 +32,28 @@ class InstallPySideToBlender(PreLaunchHook):
        # Get blender's python directory
        version_regex = re.compile(r"^[2-3]\.[0-9]+$")

        platform = system().lower()
        executable = self.launch_context.executable.executable_path
        if os.path.basename(executable).lower() != "blender.exe":
        expected_executable = "blender"
        if platform == "windows":
            expected_executable += ".exe"

        if os.path.basename(executable).lower() != expected_executable:
            self.log.info((
                "Executable does not lead to blender.exe file. Can't determine"
                " blender's python to check/install PySide2."
                f"Executable does not lead to {expected_executable} file."
                "Can't determine blender's python to check/install PySide2."
            ))
            return

        executable_dir = os.path.dirname(executable)
        versions_dir = os.path.dirname(executable)
        if platform == "darwin":
            versions_dir = os.path.join(
                os.path.dirname(versions_dir), "Resources"
            )
        version_subfolders = []
        for name in os.listdir(executable_dir):
            fullpath = os.path.join(name, executable_dir)
            if not os.path.isdir(fullpath):
                continue

            if not version_regex.match(name):
                continue

            version_subfolders.append(name)
        for dir_entry in os.scandir(versions_dir):
            if dir_entry.is_dir() and version_regex.match(dir_entry.name):
                version_subfolders.append(dir_entry.name)

        if not version_subfolders:
            self.log.info(

@@ -72,16 +73,21 @@ class InstallPySideToBlender(PreLaunchHook):

        version_subfolder = version_subfolders[0]

        pythond_dir = os.path.join(
            os.path.dirname(executable),
            version_subfolder,
            "python"
        )
        python_dir = os.path.join(versions_dir, version_subfolder, "python")
        python_lib = os.path.join(python_dir, "lib")
        python_version = "python"

        if platform != "windows":
            for dir_entry in os.scandir(python_lib):
                if dir_entry.is_dir() and dir_entry.name.startswith("python"):
                    python_lib = dir_entry.path
                    python_version = dir_entry.name
                    break

        # Change PYTHONPATH to contain blender's packages as first
        python_paths = [
            os.path.join(pythond_dir, "lib"),
            os.path.join(pythond_dir, "lib", "site-packages"),
            python_lib,
            os.path.join(python_lib, "site-packages"),
        ]
        python_path = self.launch_context.env.get("PYTHONPATH") or ""
        for path in python_path.split(os.pathsep):

@@ -91,7 +97,15 @@ class InstallPySideToBlender(PreLaunchHook):
        self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths)

        # Get blender's python executable
        python_executable = os.path.join(pythond_dir, "bin", "python.exe")
        python_bin = os.path.join(python_dir, "bin")
        if platform == "windows":
            python_executable = os.path.join(python_bin, "python.exe")
        else:
            python_executable = os.path.join(python_bin, python_version)
            # Check for python with enabled 'pymalloc'
            if not os.path.exists(python_executable):
                python_executable += "m"

        if not os.path.exists(python_executable):
            self.log.warning(
                "Couldn't find python executable for blender. {}".format(

@@ -106,7 +120,15 @@ class InstallPySideToBlender(PreLaunchHook):
            return

        # Install PySide2 in blender's python
        self.install_pyside_windows(python_executable)
        if platform == "windows":
            result = self.install_pyside_windows(python_executable)
        else:
            result = self.install_pyside(python_executable)

        if result:
            self.log.info("Successfully installed PySide2 module to blender.")
        else:
            self.log.warning("Failed to install PySide2 module to blender.")

    def install_pyside_windows(self, python_executable):
        """Install PySide2 python module to blender's python.

@@ -144,19 +166,41 @@ class InstallPySideToBlender(PreLaunchHook):
                lpDirectory=os.path.dirname(python_executable)
            )
            process_handle = process_info["hProcess"]
            obj = win32event.WaitForSingleObject(
                process_handle, win32event.INFINITE
            )
            win32event.WaitForSingleObject(process_handle, win32event.INFINITE)
            returncode = win32process.GetExitCodeProcess(process_handle)
            if returncode == 0:
                self.log.info(
                    "Successfully installed PySide2 module to blender."
                )
                return
            return returncode == 0
        except pywintypes.error:
            pass

        self.log.warning("Failed to install PySide2 module to blender.")
    def install_pyside(self, python_executable):
        """Install PySide2 python module to blender's python."""
        try:
            # Parameters
            # - use "-m pip" as module pip to install PySide2 and argument
            #   "--ignore-installed" is to force install module to blender's
            #   site-packages and make sure it is binary compatible
            args = [
                python_executable,
                "-m",
                "pip",
                "install",
                "--ignore-installed",
                "PySide2",
            ]
            process = subprocess.Popen(
                args, stdout=subprocess.PIPE, universal_newlines=True
            )
            process.communicate()
            return process.returncode == 0
        except PermissionError:
            self.log.warning(
                "Permission denied with command:"
                "\"{}\".".format(" ".join(args))
            )
        except OSError as error:
            self.log.warning(f"OS error has occurred: \"{error}\".")
        except subprocess.SubprocessError:
            pass

    def is_pyside_installed(self, python_executable):
        """Check if PySide2 module is in blender's pip list.

@@ -169,7 +213,7 @@ class InstallPySideToBlender(PreLaunchHook):
        args = [python_executable, "-m", "pip", "list"]
        process = subprocess.Popen(args, stdout=subprocess.PIPE)
        stdout, _ = process.communicate()
        lines = stdout.decode().split("\r\n")
        lines = stdout.decode().split(os.linesep)
        # Second line contains dashes that define maximum length of module name.
        # Second column of dashes defines maximum length of module version.
        package_dashes, *_ = lines[1].split(" ")
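For reference, the `pip list` output shape the parser above relies on (package names and versions invented for the example):

    #   Package    Version
    #   ---------- -------
    #   pip        21.2.4
    #   PySide2    5.15.2
    #
    # lines[1] is the dashed ruler; the width of its first dash run says how
    # many characters of each row belong to the package name.
    lines = ["Package    Version", "---------- -------", "PySide2    5.15.2"]
    package_dashes, *_ = lines[1].split(" ")
    name_width = len(package_dashes)      # 10
    print(lines[2][:name_width].strip())  # PySide2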
@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib

@@ -22,7 +22,7 @@ class CreateAction(openpype.hosts.blender.api.plugin.Creator):
        name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)

        if (self.options or {}).get("useSelection"):

@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES

@@ -37,7 +37,7 @@ class CreateAnimation(plugin.Creator):
        # asset_group.empty_display_type = 'SINGLE_ARROW'
        asset_group = bpy.data.collections.new(name=name)
        instances.children.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        lib.imprint(asset_group, self.data)

        if (self.options or {}).get("useSelection"):

@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES

@@ -40,7 +40,7 @@ class CreateCamera(plugin.Creator):
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        print(f"self.data: {self.data}")
        lib.imprint(asset_group, self.data)

@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES

@@ -34,7 +34,7 @@ class CreateLayout(plugin.Creator):
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        lib.imprint(asset_group, self.data)

        # Add selected objects to instance

@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES

@@ -34,7 +34,7 @@ class CreateModel(plugin.Creator):
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        lib.imprint(asset_group, self.data)

        # Add selected objects to instance

@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib

@@ -22,7 +22,7 @@ class CreatePointcache(openpype.hosts.blender.api.plugin.Creator):
        name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)

        if (self.options or {}).get("useSelection"):

@@ -2,7 +2,7 @@

import bpy

from avalon import api
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES

@@ -34,7 +34,7 @@ class CreateRig(plugin.Creator):
        asset_group = bpy.data.objects.new(name=name, object_data=None)
        asset_group.empty_display_type = 'SINGLE_ARROW'
        instances.objects.link(asset_group)
        self.data['task'] = api.Session.get('AVALON_TASK')
        self.data['task'] = legacy_io.Session.get('AVALON_TASK')
        lib.imprint(asset_group, self.data)

        # Add selected objects to instance
@@ -6,12 +6,12 @@ from typing import Dict, List, Optional

 import bpy

-from openpype import lib
 from openpype.pipeline import (
     legacy_create,
     get_representation_path,
     AVALON_CONTAINER_ID,
 )
+from openpype.pipeline.create import get_legacy_creator_by_name
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import (
     AVALON_CONTAINERS,

@@ -157,7 +157,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
                     t.id = local_obj

         elif local_obj.type == 'EMPTY':
-            creator_plugin = lib.get_creator_by_name("CreateAnimation")
+            creator_plugin = get_legacy_creator_by_name("CreateAnimation")
             if not creator_plugin:
                 raise ValueError("Creator plugin \"CreateAnimation\" was "
                                  "not found.")
@@ -118,7 +118,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
         # Camera creation when loading a layout is not necessary for now,
         # but the code is worth keeping in case we need it in the future.
         # # Create the camera asset and the camera instance
-        # creator_plugin = lib.get_creator_by_name("CreateCamera")
+        # creator_plugin = get_legacy_creator_by_name("CreateCamera")
         # if not creator_plugin:
         #     raise ValueError("Creator plugin \"CreateCamera\" was "
         #                      "not found.")
@@ -6,12 +6,12 @@ from typing import Dict, List, Optional

 import bpy

-from openpype import lib
 from openpype.pipeline import (
     legacy_create,
     get_representation_path,
     AVALON_CONTAINER_ID,
 )
+from openpype.pipeline.create import get_legacy_creator_by_name
 from openpype.hosts.blender.api import (
     plugin,
     get_selection,

@@ -244,7 +244,7 @@ class BlendRigLoader(plugin.AssetLoader):
         objects = self._process(libpath, asset_group, group_name, action)

         if create_animation:
-            creator_plugin = lib.get_creator_by_name("CreateAnimation")
+            creator_plugin = get_legacy_creator_by_name("CreateAnimation")
             if not creator_plugin:
                 raise ValueError("Creator plugin \"CreateAnimation\" was "
                                  "not found.")
@@ -1,13 +1,11 @@
 import os
 import json

-from bson.objectid import ObjectId
-
 import bpy
 import bpy_extras
 import bpy_extras.anim_utils

-from avalon import io
+from openpype.client import get_representation_by_name
 from openpype.hosts.blender.api import plugin
 from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
 import openpype.api

@@ -131,43 +129,32 @@ class ExtractLayout(openpype.api.Extractor):

         fbx_count = 0

+        project_name = instance.context.data["projectEntity"]["name"]
         for asset in asset_group.children:
             metadata = asset.get(AVALON_PROPERTY)

-            parent = metadata["parent"]
+            version_id = metadata["parent"]
             family = metadata["family"]

-            self.log.debug("Parent: {}".format(parent))
+            self.log.debug("Parent: {}".format(version_id))
             # Get blend reference
-            blend = io.find_one(
-                {
-                    "type": "representation",
-                    "parent": ObjectId(parent),
-                    "name": "blend"
-                },
-                projection={"_id": True})
+            blend = get_representation_by_name(
+                project_name, "blend", version_id, fields=["_id"]
+            )
             blend_id = None
             if blend:
                 blend_id = blend["_id"]
             # Get fbx reference
-            fbx = io.find_one(
-                {
-                    "type": "representation",
-                    "parent": ObjectId(parent),
-                    "name": "fbx"
-                },
-                projection={"_id": True})
+            fbx = get_representation_by_name(
+                project_name, "fbx", version_id, fields=["_id"]
+            )
             fbx_id = None
             if fbx:
                 fbx_id = fbx["_id"]
             # Get abc reference
-            abc = io.find_one(
-                {
-                    "type": "representation",
-                    "parent": ObjectId(parent),
-                    "name": "abc"
-                },
-                projection={"_id": True})
+            abc = get_representation_by_name(
+                project_name, "abc", version_id, fields=["_id"]
+            )
             abc_id = None
             if abc:
                 abc_id = abc["_id"]

@@ -193,7 +180,7 @@ class ExtractLayout(openpype.api.Extractor):
                 "rotation": {
                     "x": asset.rotation_euler.x,
                     "y": asset.rotation_euler.y,
-                    "z": asset.rotation_euler.z,
+                    "z": asset.rotation_euler.z
                 },
                 "scale": {
                     "x": asset.scale.x,

@@ -202,6 +189,18 @@ class ExtractLayout(openpype.api.Extractor):
                 }
             }

+            json_element["transform_matrix"] = []
+
+            for row in list(asset.matrix_world.transposed()):
+                json_element["transform_matrix"].append(list(row))
+
+            json_element["basis"] = [
+                [1, 0, 0, 0],
+                [0, -1, 0, 0],
+                [0, 0, 1, 0],
+                [0, 0, 0, 1]
+            ]
+
             # Extract the animation as well
             if family == "rig":
                 f, n = self._export_animation(
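Note: the three `io.find_one` representation queries above collapse into `get_representation_by_name`; a sketch of the lookup, assuming the helper returns the representation document or None:

    from openpype.client import get_representation_by_name

    # version_id is the "parent" value stored in the asset's AVALON metadata
    blend = get_representation_by_name(
        project_name, "blend", version_id, fields=["_id"]
    )
    blend_id = blend["_id"] if blend else None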
@@ -1,6 +1,5 @@
 import json

-from avalon import io
 import pyblish.api

@@ -1,8 +1,9 @@
 from typing import List

 import mathutils
+import bpy

 import pyblish.api
 import openpype.api
 import openpype.hosts.blender.api.action

@@ -17,18 +18,15 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
     order = openpype.api.ValidateContentsOrder
     hosts = ["blender"]
     families = ["camera"]
-    category = "geometry"
-    version = (0, 1, 0)
     label = "Zero Keyframe"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

     _identity = mathutils.Matrix()

-    @classmethod
-    def get_invalid(cls, instance) -> List:
+    @staticmethod
+    def get_invalid(instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
-            if obj.type == "CAMERA":
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA":
                 if obj.animation_data and obj.animation_data.action:
                     action = obj.animation_data.action
                     frames_set = set()

@@ -45,4 +43,5 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Object found in instance is not in Object Mode: {invalid}")
+                f"Camera must have a keyframe at frame 0: {invalid}"
+            )
@@ -3,13 +3,14 @@ from typing import List
 import bpy

 import pyblish.api
+import openpype.api
 import openpype.hosts.blender.api.action


 class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
     """Validate that the current mesh has UV's."""

-    order = pyblish.api.ValidatorOrder
+    order = openpype.api.ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
     category = "geometry"

@@ -25,7 +26,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
         for uv_layer in obj.data.uv_layers:
             for polygon in obj.data.polygons:
                 for loop_index in polygon.loop_indices:
-                    if not uv_layer.data[loop_index].uv:
+                    if (
+                        loop_index >= len(uv_layer.data)
+                        or not uv_layer.data[loop_index].uv
+                    ):
                         return False

         return True

@@ -33,20 +37,20 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance) -> List:
         invalid = []
-        # TODO (jasper): only check objects in the collection that will be published?
-        for obj in [
-                obj for obj in instance]:
-            try:
-                if obj.type == 'MESH':
-                    # Make sure we are in object mode.
-                    bpy.ops.object.mode_set(mode='OBJECT')
-                    if not cls.has_uvs(obj):
-                        invalid.append(obj)
-            except:
-                continue
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.type == 'MESH':
+                if obj.mode != "OBJECT":
+                    cls.log.warning(
+                        f"Mesh object {obj.name} should be in 'OBJECT' mode"
+                        " to be properly checked."
+                    )
+                if not cls.has_uvs(obj):
+                    invalid.append(obj)
         return invalid

     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")
+            raise RuntimeError(
+                f"Meshes found in instance without valid UV's: {invalid}"
+            )
@@ -3,28 +3,27 @@ from typing import List
 import bpy

 import pyblish.api
 import openpype.api
 import openpype.hosts.blender.api.action


 class ValidateMeshNoNegativeScale(pyblish.api.Validator):
     """Ensure that meshes don't have a negative scale."""

-    order = pyblish.api.ValidatorOrder
+    order = openpype.api.ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
     category = "geometry"
     label = "Mesh No Negative Scale"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

     @staticmethod
     def get_invalid(instance) -> List:
         invalid = []
-        # TODO (jasper): only check objects in the collection that will be published?
-        for obj in [
-            obj for obj in bpy.data.objects if obj.type == 'MESH'
-        ]:
-            if any(v < 0 for v in obj.scale):
-                invalid.append(obj)
-
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.type == 'MESH':
+                if any(v < 0 for v in obj.scale):
+                    invalid.append(obj)
         return invalid

     def process(self, instance):
@@ -1,6 +1,9 @@
 from typing import List

+import bpy
+
 import pyblish.api
 import openpype.api
 import openpype.hosts.blender.api.action

@@ -19,13 +22,13 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
     label = "No Colons in names"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

-    @classmethod
-    def get_invalid(cls, instance) -> List:
+    @staticmethod
+    def get_invalid(instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
+        for obj in instance:
             if ':' in obj.name:
                 invalid.append(obj)
-            if obj.type == 'ARMATURE':
+            if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE':
                 for bone in obj.data.bones:
                     if ':' in bone.name:
                         invalid.append(obj)

@@ -36,4 +39,5 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Objects found with colon in name: {invalid}")
+                f"Objects found with colon in name: {invalid}"
+            )
@@ -1,5 +1,7 @@
 from typing import List

+import bpy
+
 import pyblish.api
 import openpype.hosts.blender.api.action

@@ -10,26 +12,21 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
     order = pyblish.api.ValidatorOrder - 0.01
     hosts = ["blender"]
     families = ["model", "rig", "layout"]
-    category = "geometry"
     label = "Validate Object Mode"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
     optional = False

-    @classmethod
-    def get_invalid(cls, instance) -> List:
+    @staticmethod
+    def get_invalid(instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
-            try:
-                if obj.type == 'MESH' or obj.type == 'ARMATURE':
-                    # Check if the object is in object mode.
-                    if not obj.mode == 'OBJECT':
-                        invalid.append(obj)
-            except Exception:
-                continue
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object) and obj.mode != "OBJECT":
+                invalid.append(obj)
         return invalid

     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Object found in instance is not in Object Mode: {invalid}")
+                f"Object found in instance is not in Object Mode: {invalid}"
+            )
@@ -1,8 +1,10 @@
 from typing import List

 import mathutils
+import bpy

 import pyblish.api
 import openpype.api
 import openpype.hosts.blender.api.action

@@ -18,7 +20,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
     order = openpype.api.ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
-    category = "geometry"
     version = (0, 1, 0)
     label = "Transform Zero"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

@@ -28,8 +29,11 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance) -> List:
         invalid = []
-        for obj in [obj for obj in instance]:
-            if obj.matrix_basis != cls._identity:
+        for obj in instance:
+            if (
+                isinstance(obj, bpy.types.Object)
+                and obj.matrix_basis != cls._identity
+            ):
                 invalid.append(obj)
         return invalid

@@ -37,4 +41,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise RuntimeError(
-                f"Object found in instance is not in Object Mode: {invalid}")
+                "Object found in instance does not"
+                f" have zero transform: {invalid}"
+            )
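Note: the validators above converge on one filtering idiom; a hypothetical standalone helper (not part of the diff) showing its shape:

    import bpy

    def iter_objects(instance, object_type=None):
        # Instances may hold non-Object members (collections, data blocks),
        # hence the isinstance guard before touching Object attributes.
        for obj in instance:
            if not isinstance(obj, bpy.types.Object):
                continue
            if object_type and obj.type != object_type:
                continue
            yield obj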
@@ -3,8 +3,6 @@ import sys
 import copy
 import argparse

-from avalon import io
-
 import pyblish.api
 import pyblish.util

@@ -13,17 +11,16 @@ import openpype
 import openpype.hosts.celaction
 from openpype.hosts.celaction import api as celaction
 from openpype.tools.utils import host_tools
+from openpype.pipeline import install_openpype_plugins

-log = Logger().get_logger("Celaction_cli_publisher")
+log = Logger.get_logger("Celaction_cli_publisher")

 publish_host = "celaction"

 HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__))
 PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
 PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


 def cli():

@@ -74,7 +71,7 @@ def main():
     _prepare_publish_environments()

     # Registers pype's Global pyblish plugins
-    openpype.install()
+    install_openpype_plugins()

     if os.path.exists(PUBLISH_PATH):
         log.info(f"Registering path: {PUBLISH_PATH}")
@@ -1,10 +1,15 @@
 import os
 import collections
+from pprint import pformat

 import pyblish.api
-from avalon import io

-from pprint import pformat
+from openpype.client import (
+    get_subsets,
+    get_last_versions,
+    get_representations
+)
+from openpype.pipeline import legacy_io


 class AppendCelactionAudio(pyblish.api.ContextPlugin):

@@ -60,10 +65,10 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):
         """

         # Query all subsets for asset
-        subset_docs = io.find({
-            "type": "subset",
-            "parent": asset_doc["_id"]
-        })
+        project_name = legacy_io.active_project()
+        subset_docs = get_subsets(
+            project_name, asset_ids=[asset_doc["_id"]], fields=["_id"]
+        )
         # Collect all subset ids
         subset_ids = [
             subset_doc["_id"]

@@ -76,37 +81,19 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):
                 "Try this for start `r'.*'`: asset: `{}`"
             ).format(asset_doc["name"])

-        # Last version aggregation
-        pipeline = [
-            # Find all versions of those subsets
-            {"$match": {
-                "type": "version",
-                "parent": {"$in": subset_ids}
-            }},
-            # Sorting versions all together
-            {"$sort": {"name": 1}},
-            # Group them by "parent", but only take the last
-            {"$group": {
-                "_id": "$parent",
-                "_version_id": {"$last": "$_id"},
-                "name": {"$last": "$name"}
-            }}
-        ]
-        last_versions_by_subset_id = dict()
-        for doc in io.aggregate(pipeline):
-            doc["parent"] = doc["_id"]
-            doc["_id"] = doc.pop("_version_id")
-            last_versions_by_subset_id[doc["parent"]] = doc
+        last_versions_by_subset_id = get_last_versions(
+            project_name, subset_ids, fields=["_id", "parent"]
+        )

         version_docs_by_id = {}
         for version_doc in last_versions_by_subset_id.values():
             version_docs_by_id[version_doc["_id"]] = version_doc

-        repre_docs = io.find({
-            "type": "representation",
-            "parent": {"$in": list(version_docs_by_id.keys())},
-            "name": {"$in": representations}
-        })
+        repre_docs = get_representations(
+            project_name,
+            version_ids=version_docs_by_id.keys(),
+            representation_names=representations
+        )
         repre_docs_by_version_id = collections.defaultdict(list)
         for repre_doc in repre_docs:
             version_id = repre_doc["parent"]
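Note: the removed aggregation pipeline is replaced by chained `openpype.client` helpers; a sketch of the chain, with "wav" as an illustrative representation name:

    from openpype.client import get_subsets, get_last_versions, get_representations

    subset_ids = [
        subset["_id"]
        for subset in get_subsets(
            project_name, asset_ids=[asset_id], fields=["_id"])
    ]
    last_versions = get_last_versions(
        project_name, subset_ids, fields=["_id", "parent"])
    repre_docs = get_representations(
        project_name,
        version_ids=[doc["_id"] for doc in last_versions.values()],
        representation_names=["wav"]
    )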
@@ -1,6 +1,6 @@
 import os
-from avalon import api
 import pyblish.api
+from openpype.pipeline import legacy_io


 class CollectCelactionInstances(pyblish.api.ContextPlugin):

@@ -10,7 +10,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder + 0.1

     def process(self, context):
-        task = api.Session["AVALON_TASK"]
+        task = legacy_io.Session["AVALON_TASK"]
         current_file = context.data["currentFile"]
         staging_dir = os.path.dirname(current_file)
         scene_file = os.path.basename(current_file)
@@ -11,10 +11,8 @@ from .constants import (
 from .lib import (
     CTX,
     FlameAppFramework,
-    get_project_manager,
     get_current_project,
     get_current_sequence,
-    create_bin,
     create_segment_data_marker,
     get_segment_data_marker,
     set_segment_data_marker,

@@ -29,7 +27,11 @@ from .lib import (
     get_frame_from_filename,
     get_padding_from_filename,
     maintained_object_duplication,
-    get_clip_segment
+    maintained_temp_file_path,
+    get_clip_segment,
+    get_batch_group_from_desktop,
+    MediaInfoFile,
+    TimeEffectMetadata
 )
 from .utils import (
     setup,

@@ -56,7 +58,6 @@ from .plugin import (
     PublishableClip,
     ClipLoader,
     OpenClipSolver
-
 )
 from .workio import (
     open_file,

@@ -71,6 +72,10 @@ from .render_utils import (
     get_preset_path_by_xml_name,
     modify_preset_file
 )
+from .batch_utils import (
+    create_batch_group,
+    create_batch_group_conent
+)

 __all__ = [
     # constants

@@ -83,10 +88,8 @@ __all__ = [
     # lib
     "CTX",
     "FlameAppFramework",
-    "get_project_manager",
     "get_current_project",
     "get_current_sequence",
-    "create_bin",
     "create_segment_data_marker",
     "get_segment_data_marker",
     "set_segment_data_marker",

@@ -101,7 +104,11 @@ __all__ = [
     "get_frame_from_filename",
     "get_padding_from_filename",
     "maintained_object_duplication",
+    "maintained_temp_file_path",
     "get_clip_segment",
+    "get_batch_group_from_desktop",
+    "MediaInfoFile",
+    "TimeEffectMetadata",

     # pipeline
     "install",

@@ -142,5 +149,9 @@ __all__ = [
     # render utils
     "export_clip",
     "get_preset_path_by_xml_name",
-    "modify_preset_file"
+    "modify_preset_file",
+
+    # batch utils
+    "create_batch_group",
+    "create_batch_group_conent"
 ]
151
openpype/hosts/flame/api/batch_utils.py
Normal file
@@ -0,0 +1,151 @@
import flame


def create_batch_group(
        name,
        frame_start,
        frame_duration,
        update_batch_group=None,
        **kwargs
):
    """Create Batch Group in active project's Desktop

    Args:
        name (str): name of batch group to be created
        frame_start (int): start frame of batch
        frame_duration (int): duration of batch in frames
        update_batch_group (PyBatch)[optional]: batch group to update

    Return:
        PyBatch: active flame batch group
    """
    # make sure some batch obj is present
    batch_group = update_batch_group or flame.batch

    schematic_reels = kwargs.get("shematic_reels") or ['LoadedReel1']
    shelf_reels = kwargs.get("shelf_reels") or ['ShelfReel1']

    handle_start = kwargs.get("handleStart") or 0
    handle_end = kwargs.get("handleEnd") or 0

    frame_start -= handle_start
    frame_duration += handle_start + handle_end

    if not update_batch_group:
        # Create batch group with name, start_frame value, duration value,
        # set of schematic reel names, set of shelf reel names
        batch_group = batch_group.create_batch_group(
            name,
            start_frame=frame_start,
            duration=frame_duration,
            reels=schematic_reels,
            shelf_reels=shelf_reels
        )
    else:
        batch_group.name = name
        batch_group.start_frame = frame_start
        batch_group.duration = frame_duration

        # add reels to batch group
        _add_reels_to_batch_group(
            batch_group, schematic_reels, shelf_reels)

        # TODO: also update write node if there is any
        # TODO: also update loaders to start from correct frameStart

    if kwargs.get("switch_batch_tab"):
        # use this command to switch to the batch tab
        batch_group.go_to()

    return batch_group


def _add_reels_to_batch_group(batch_group, reels, shelf_reels):
    # update or create defined reels
    # helper variables
    reel_names = [
        r.name.get_value()
        for r in batch_group.reels
    ]
    shelf_reel_names = [
        r.name.get_value()
        for r in batch_group.shelf_reels
    ]
    # add schematic reels
    for _r in reels:
        if _r in reel_names:
            continue
        batch_group.create_reel(_r)

    # add shelf reels
    for _sr in shelf_reels:
        if _sr in shelf_reel_names:
            continue
        batch_group.create_shelf_reel(_sr)


def create_batch_group_conent(batch_nodes, batch_links, batch_group=None):
    """Creating batch group with links

    Args:
        batch_nodes (list of dict): each dict is node definition
        batch_links (list of dict): each dict is link definition
        batch_group (PyBatch, optional): batch group. Defaults to None.

    Return:
        dict: all batch nodes {name or id: PyNode}
    """
    # make sure some batch obj is present
    batch_group = batch_group or flame.batch
    all_batch_nodes = {
        b.name.get_value(): b
        for b in batch_group.nodes
    }
    for node in batch_nodes:
        # NOTE: node_props ideally needs to be an OrderedDict type
        node_id, node_type, node_props = (
            node["id"], node["type"], node["properties"])

        # get node name for checking if exists
        node_name = node_props.pop("name", None) or node_id

        if all_batch_nodes.get(node_name):
            # update existing batch node
            batch_node = all_batch_nodes[node_name]
        else:
            # create new batch node
            batch_node = batch_group.create_node(node_type)

            # set name
            batch_node.name.set_value(node_name)

        # set attributes found in node props
        for key, value in node_props.items():
            if not hasattr(batch_node, key):
                continue
            setattr(batch_node, key, value)

        # add created node for possible linking
        all_batch_nodes[node_id] = batch_node

    # link nodes to each other
    for link in batch_links:
        _from_n, _to_n = link["from_node"], link["to_node"]

        # check if all linking nodes are available
        if not all([
            all_batch_nodes.get(_from_n["id"]),
            all_batch_nodes.get(_to_n["id"])
        ]):
            continue

        # link nodes in defined link
        batch_group.connect_nodes(
            all_batch_nodes[_from_n["id"]], _from_n["connector"],
            all_batch_nodes[_to_n["id"]], _to_n["connector"]
        )

    # sort batch nodes
    batch_group.organize()

    return all_batch_nodes
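Note: a usage sketch for the new module; it only runs inside a Flame session, and the shot name, frame values and node definitions below are illustrative, not part of the diff:

    from openpype.hosts.flame.api import (
        create_batch_group, create_batch_group_conent)

    batch = create_batch_group(
        "sh010_batch",          # illustrative name
        frame_start=1001,
        frame_duration=120,
        handleStart=10,         # consumed from kwargs, widens the range
        handleEnd=10,
        switch_batch_tab=True
    )
    nodes = create_batch_group_conent(
        batch_nodes=[
            {"id": "read1", "type": "Read File", "properties": {}},
            {"id": "write1", "type": "Write File", "properties": {}},
        ],
        batch_links=[{
            "from_node": {"id": "read1", "connector": "Default"},
            "to_node": {"id": "write1", "connector": "Default"},
        }],
        batch_group=batch
    )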
@@ -3,7 +3,14 @@ import os
 import re
 import json
 import pickle
+import clique
+import tempfile
+import traceback
+import itertools
 import contextlib
+import xml.etree.cElementTree as cET
+from copy import deepcopy, copy
+from xml.etree import ElementTree as ET
+from pprint import pformat
 from .constants import (
     MARKER_COLOR,

@@ -12,9 +19,10 @@ from .constants import (
     COLOR_MAP,
     MARKER_PUBLISH_DEFAULT
 )
-from openpype.api import Logger
-
-log = Logger.get_logger(__name__)
+import openpype.api as openpype
+
+log = openpype.Logger.get_logger(__name__)

 FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")
@@ -227,16 +235,6 @@ class FlameAppFramework(object):
         return True


-def get_project_manager():
-    # TODO: get_project_manager
-    return
-
-
-def get_media_storage():
-    # TODO: get_media_storage
-    return
-
-
 def get_current_project():
     import flame
     return flame.project.current_project
@@ -266,20 +264,16 @@ def get_current_sequence(selection):
     return process_timeline


-def create_bin(name, root=None):
-    # TODO: create_bin
-    return
-
-
 def rescan_hooks():
     import flame
     try:
-        flame.execute_shortcut('Rescan Python Hooks')
+        flame.execute_shortcut("Rescan Python Hooks")
     except Exception:
         pass


 def get_metadata(project_name, _log=None):
     # TODO: can be replaced by MediaInfoFile class method
     from adsk.libwiretapPythonClientAPI import (
         WireTapClient,
         WireTapServerHandle,
@@ -568,7 +562,7 @@ def get_segment_attributes(segment):
         if not hasattr(segment, attr_name):
             continue
         attr = getattr(segment, attr_name)
-        segment_attrs_data[attr] = str(attr).replace("+", ":")
+        segment_attrs_data[attr_name] = str(attr).replace("+", ":")

         if attr_name in ["record_in", "record_out"]:
             clip_data[attr_name] = attr.relative_frame
@@ -704,6 +698,25 @@ def maintained_object_duplication(item):
     flame.delete(duplicate)


+@contextlib.contextmanager
+def maintained_temp_file_path(suffix=None):
+    _suffix = suffix or ""
+
+    try:
+        # Store dumped json to temporary file
+        temporary_file = tempfile.mktemp(
+            suffix=_suffix, prefix="flame_maintained_")
+        yield temporary_file.replace("\\", "/")
+
+    except IOError as _error:
+        raise IOError(
+            "Not able to create temp json file: {}".format(_error))
+
+    finally:
+        # Remove the temporary json
+        os.remove(temporary_file)
+
+
 def get_clip_segment(flame_clip):
     name = flame_clip.name.get_value()
     version = flame_clip.versions[0]
@@ -717,3 +730,542 @@ def get_clip_segment(flame_clip):
        raise ValueError("Clip `{}` has too many segments!".format(name))

    return segments[0]


def get_batch_group_from_desktop(name):
    project = get_current_project()
    project_desktop = project.current_workspace.desktop

    for bgroup in project_desktop.batch_groups:
        if bgroup.name.get_value() in name:
            return bgroup


class MediaInfoFile(object):
    """Class to get media info file clip data

    Raises:
        IOError: MEDIA_SCRIPT_PATH path doesn't exist
        TypeError: Not able to generate clip xml data file
        ET.ParseError: Missing clip in xml clip data
        IOError: Not able to save xml clip data to file

    Attributes:
        str: `MEDIA_SCRIPT_PATH` path to flame binary
        logging.Logger: `log` logger

    TODO: add method for getting metadata to dict
    """
    MEDIA_SCRIPT_PATH = "/opt/Autodesk/mio/current/dl_get_media_info"

    log = log

    _clip_data = None
    _start_frame = None
    _fps = None
    _drop_mode = None
    _file_pattern = None

    def __init__(self, path, **kwargs):

        # replace log if any
        if kwargs.get("logger"):
            self.log = kwargs["logger"]

        # test if `dl_get_media_info` path exists
        self._validate_media_script_path()

        # derive other feed variables
        feed_basename = os.path.basename(path)
        feed_dir = os.path.dirname(path)
        feed_ext = os.path.splitext(feed_basename)[1][1:].lower()

        with maintained_temp_file_path(".clip") as tmp_path:
            self.log.info("Temp File: {}".format(tmp_path))
            self._generate_media_info_file(tmp_path, feed_ext, feed_dir)

            # get collection containing feed_basename from path
            self.file_pattern = self._get_collection(
                feed_basename, feed_dir, feed_ext)

            if (
                not self.file_pattern
                and os.path.exists(os.path.join(feed_dir, feed_basename))
            ):
                self.file_pattern = feed_basename

            # get clip data and make them single if there is multiple
            # clips data
            xml_data = self._make_single_clip_media_info(
                tmp_path, feed_basename, self.file_pattern)
            self.log.debug("xml_data: {}".format(xml_data))
            self.log.debug("type: {}".format(type(xml_data)))

            # get all time related data and assign them
            self._get_time_info_from_origin(xml_data)
            self.log.debug("start_frame: {}".format(self.start_frame))
            self.log.debug("fps: {}".format(self.fps))
            self.log.debug("drop frame: {}".format(self.drop_mode))
            self.clip_data = xml_data

    def _get_collection(self, feed_basename, feed_dir, feed_ext):
        """ Get collection string

        Args:
            feed_basename (str): file base name
            feed_dir (str): file's directory
            feed_ext (str): file extension

        Raises:
            AttributeError: feed_ext is not matching feed_basename

        Returns:
            str: collection basename with range of sequence
        """
        partialname = self._separate_file_head(feed_basename, feed_ext)
        self.log.debug("__ partialname: {}".format(partialname))

        # make sure partial input basename has the correct extension
        if not partialname:
            raise AttributeError(
                "Wrong input attributes. Basename - {}, Ext - {}".format(
                    feed_basename, feed_ext
                )
            )

        # get all related files
        files = [
            f for f in os.listdir(feed_dir)
            if partialname == self._separate_file_head(f, feed_ext)
        ]

        # ignore remainders as we don't need them
        collections = clique.assemble(files)[0]

        # in case no collection found return None
        # it is probably just single file
        if not collections:
            return

        # we expect only one collection
        collection = collections[0]

        self.log.debug("__ collection: {}".format(collection))

        if collection.is_contiguous():
            return self._format_collection(collection)

        # add `[` in front to make sure it won't capture
        # a shot name with the same number
        number_from_path = self._separate_number(feed_basename, feed_ext)
        search_number_pattern = "[" + number_from_path
        # convert to multiple collections
        _continues_colls = collection.separate()
        for _coll in _continues_colls:
            coll_to_text = self._format_collection(
                _coll, len(number_from_path))
            self.log.debug("__ coll_to_text: {}".format(coll_to_text))
            if search_number_pattern in coll_to_text:
                return coll_to_text

    @staticmethod
    def _format_collection(collection, padding=None):
        padding = padding or collection.padding
        # if no holes then return collection
        head = collection.format("{head}")
        tail = collection.format("{tail}")
        range_template = "[{{:0{0}d}}-{{:0{0}d}}]".format(
            padding)
        ranges = range_template.format(
            min(collection.indexes),
            max(collection.indexes)
        )
        # if no holes then return collection
        return "{}{}{}".format(head, ranges, tail)

    def _separate_file_head(self, basename, extension):
        """ Get only head without sequence and extension

        Args:
            basename (str): file base name
            extension (str): file extension

        Returns:
            str: file head
        """
        # in case sequence file
        found = re.findall(
            r"(.*)[._][\d]*(?=.{})".format(extension),
            basename,
        )
        if found:
            return found.pop()

        # in case single file
        name, ext = os.path.splitext(basename)

        if extension == ext[1:]:
            return name

    def _separate_number(self, basename, extension):
        """ Get only sequence number as string

        Args:
            basename (str): file base name
            extension (str): file extension

        Returns:
            str: number with padding
        """
        # in case sequence file
        found = re.findall(
            r"[._]([\d]*)(?=.{})".format(extension),
            basename,
        )
        if found:
            return found.pop()

    @property
    def clip_data(self):
        """Clip's xml clip data

        Returns:
            xml.etree.ElementTree: xml data
        """
        return self._clip_data

    @clip_data.setter
    def clip_data(self, data):
        self._clip_data = data

    @property
    def start_frame(self):
        """ Clip's starting frame found in timecode

        Returns:
            int: number of frames
        """
        return self._start_frame

    @start_frame.setter
    def start_frame(self, number):
        self._start_frame = int(number)

    @property
    def fps(self):
        """ Clip's frame rate

        Returns:
            float: frame rate
        """
        return self._fps

    @fps.setter
    def fps(self, fl_number):
        self._fps = float(fl_number)

    @property
    def drop_mode(self):
        """ Clip's drop frame mode

        Returns:
            str: drop frame flag
        """
        return self._drop_mode

    @drop_mode.setter
    def drop_mode(self, text):
        self._drop_mode = str(text)

    @property
    def file_pattern(self):
        """Clip's file pattern

        Returns:
            str: file pattern. ex. file.[1-2].exr
        """
        return self._file_pattern

    @file_pattern.setter
    def file_pattern(self, fpattern):
        self._file_pattern = fpattern

    def _validate_media_script_path(self):
        if not os.path.isfile(self.MEDIA_SCRIPT_PATH):
            raise IOError("Media Script does not exist: `{}`".format(
                self.MEDIA_SCRIPT_PATH))

    def _generate_media_info_file(self, fpath, feed_ext, feed_dir):
        """ Generate media info xml .clip file

        Args:
            fpath (str): .clip file path
            feed_ext (str): file extension to be filtered
            feed_dir (str): look up directory

        Raises:
            TypeError: Type error if it fails
        """
        # Create cmd arguments for getting xml media info file
        cmd_args = [
            self.MEDIA_SCRIPT_PATH,
            "-e", feed_ext,
            "-o", fpath,
            feed_dir
        ]

        try:
            # execute creation of clip xml template data
            openpype.run_subprocess(cmd_args)
        except TypeError as error:
            raise TypeError(
                "Error creating `{}` due: {}".format(fpath, error))

    def _make_single_clip_media_info(self, fpath, feed_basename, path_pattern):
        """ Separate only the relevant clip object from .clip file

        Args:
            fpath (str): clip file path
            feed_basename (str): search basename
            path_pattern (str): search file pattern (file.[1-2].exr)

        Raises:
            ET.ParseError: if nothing found

        Returns:
            ET.Element: xml element data of matching clip
        """
        with open(fpath) as f:
            lines = f.readlines()
            _added_root = itertools.chain(
                "<root>", deepcopy(lines)[1:], "</root>")
            new_root = ET.fromstringlist(_added_root)

        # find the clip which is matching to my input name
        xml_clips = new_root.findall("clip")
        matching_clip = None
        for xml_clip in xml_clips:
            clip_name = xml_clip.find("name").text
            self.log.debug("__ clip_name: `{}`".format(clip_name))
            if clip_name not in feed_basename:
                continue

            # test path pattern
            for out_track in xml_clip.iter("track"):
                for out_feed in out_track.iter("feed"):
                    for span in out_feed.iter("span"):
                        # start frame
                        span_path = span.find("path")
                        self.log.debug(
                            "__ span_path.text: {}, path_pattern: {}".format(
                                span_path.text, path_pattern
                            )
                        )
                        if path_pattern in span_path.text:
                            matching_clip = xml_clip

        if matching_clip is None:
            # return warning there is missing clip
            raise ET.ParseError(
                "Missing clip in `{}`. Available clips {}".format(
                    feed_basename, [
                        xml_clip.find("name").text
                        for xml_clip in xml_clips
                    ]
                ))

        return matching_clip

    def _get_time_info_from_origin(self, xml_data):
        """Set time info to class attributes

        Args:
            xml_data (ET.Element): clip data
        """
        try:
            for out_track in xml_data.iter("track"):
                for out_feed in out_track.iter("feed"):
                    # start frame
                    out_feed_nb_ticks_obj = out_feed.find(
                        "startTimecode/nbTicks")
                    self.start_frame = out_feed_nb_ticks_obj.text

                    # fps
                    out_feed_fps_obj = out_feed.find(
                        "startTimecode/rate")
                    self.fps = out_feed_fps_obj.text

                    # drop frame mode
                    out_feed_drop_mode_obj = out_feed.find(
                        "startTimecode/dropMode")
                    self.drop_mode = out_feed_drop_mode_obj.text
                    break
        except Exception as msg:
            self.log.warning(msg)

    @staticmethod
    def write_clip_data_to_file(fpath, xml_element_data):
        """ Write xml element of clip data to file

        Args:
            fpath (string): file path
            xml_element_data (xml.etree.ElementTree.Element): xml data

        Raises:
            IOError: If data could not be written to file
        """
        try:
            # save it as new file
            tree = cET.ElementTree(xml_element_data)
            tree.write(
                fpath, xml_declaration=True,
                method="xml", encoding="UTF-8"
            )
        except IOError as error:
            raise IOError(
                "Not able to write data to file: {}".format(error))


class TimeEffectMetadata(object):
    log = log
    _data = {}
    _retime_modes = {
        0: "speed",
        1: "timewarp",
        2: "duration"
    }

    def __init__(self, segment, logger=None):
        if logger:
            self.log = logger

        self._data = self._get_metadata(segment)

    @property
    def data(self):
        """ Returns timewarp effect data

        Returns:
            dict: retime data
        """
        return self._data

    def _get_metadata(self, segment):
        effects = segment.effects or []
        for effect in effects:
            if effect.type == "Timewarp":
                with maintained_temp_file_path(".timewarp_node") as tmp_path:
                    self.log.info("Temp File: {}".format(tmp_path))
                    effect.save_setup(tmp_path)
                    return self._get_attributes_from_xml(tmp_path)

        return {}

    def _get_attributes_from_xml(self, tmp_path):
        with open(tmp_path, "r") as tw_setup_file:
            tw_setup_string = tw_setup_file.read()
            tw_setup_file.close()

        tw_setup_xml = ET.fromstring(tw_setup_string)
        tw_setup = self._dictify(tw_setup_xml)
        # pprint(tw_setup)
        try:
            tw_setup_state = tw_setup["Setup"]["State"][0]
            mode = int(
                tw_setup_state["TW_RetimerMode"][0]["_text"]
            )
            r_data = {
                "type": self._retime_modes[mode],
                "effectStart": int(
                    tw_setup["Setup"]["Base"][0]["Range"][0]["Start"]),
                "effectEnd": int(
                    tw_setup["Setup"]["Base"][0]["Range"][0]["End"])
            }

            if mode == 0:  # speed
                r_data[self._retime_modes[mode]] = float(
                    tw_setup_state["TW_Speed"]
                    [0]["Channel"][0]["Value"][0]["_text"]
                ) / 100
            elif mode == 1:  # timewarp
                print("timing")
                r_data[self._retime_modes[mode]] = self._get_anim_keys(
                    tw_setup_state["TW_Timing"]
                )
            elif mode == 2:  # duration
                r_data[self._retime_modes[mode]] = {
                    "start": {
                        "source": int(
                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
                            [0]["KFrames"][0]["Key"][0]["Value"][0]["_text"]
                        ),
                        "timeline": int(
                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
                            [0]["KFrames"][0]["Key"][0]["Frame"][0]["_text"]
                        )
                    },
                    "end": {
                        "source": int(
                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
                            [0]["KFrames"][0]["Key"][1]["Value"][0]["_text"]
                        ),
                        "timeline": int(
                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
                            [0]["KFrames"][0]["Key"][1]["Frame"][0]["_text"]
                        )
                    }
                }
        except Exception:
            lines = traceback.format_exception(*sys.exc_info())
            self.log.error("\n".join(lines))
            return

        return r_data

    def _get_anim_keys(self, setup_cat, index=None):
        return_data = {
            "extrapolation": (
                setup_cat[0]["Channel"][0]["Extrap"][0]["_text"]
            ),
            "animKeys": []
        }
        for key in setup_cat[0]["Channel"][0]["KFrames"][0]["Key"]:
            if index and int(key["Index"]) != index:
                continue
            key_data = {
                "source": float(key["Value"][0]["_text"]),
                "timeline": float(key["Frame"][0]["_text"]),
                "index": int(key["Index"]),
                "curveMode": key["CurveMode"][0]["_text"],
                "curveOrder": key["CurveOrder"][0]["_text"]
            }
            if key.get("TangentMode"):
                key_data["tangentMode"] = key["TangentMode"][0]["_text"]

            return_data["animKeys"].append(key_data)

        return return_data

    def _dictify(self, xml_, root=True):
        """ Convert xml object to dictionary

        Args:
            xml_ (xml.etree.ElementTree.Element): xml data
            root (bool, optional): is root available. Defaults to True.

        Returns:
            dict: dictionarized xml
        """

        if root:
            return {xml_.tag: self._dictify(xml_, False)}

        d = copy(xml_.attrib)
        if xml_.text:
            d["_text"] = xml_.text

        for x in xml_.findall("./*"):
            if x.tag not in d:
                d[x.tag] = []
            d[x.tag].append(self._dictify(x, False))
        return d
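Note: a usage sketch for `MediaInfoFile`; the path is illustrative and the class requires the `dl_get_media_info` binary to be present on the workstation:

    from openpype.hosts.flame.api import MediaInfoFile

    # parsing happens in __init__; the properties are populated after it
    info = MediaInfoFile("/mnt/plates/sh010/plate.1001.exr")
    print(info.file_pattern)   # e.g. "plate.[1001-1120].exr"
    print(info.start_frame, info.fps, info.drop_mode)
    MediaInfoFile.write_clip_data_to_file("/tmp/sh010.clip", info.clip_data)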
@@ -1,24 +1,19 @@
 import os
-import re
 import shutil
-import sys
-from xml.etree import ElementTree as ET
-import six
-import qargparse
-from Qt import QtWidgets, QtCore
-import openpype.api as openpype
-from openpype.pipeline import (
-    LegacyCreator,
-    LoaderPlugin,
-)
-from openpype import style
-from . import (
-    lib as flib,
-    pipeline as fpipeline,
-    constants
-)
-
+from copy import deepcopy
+from xml.etree import ElementTree as ET
+
+from Qt import QtCore, QtWidgets
+
+import openpype.api as openpype
+import qargparse
+from openpype import style
+from openpype.pipeline import LegacyCreator, LoaderPlugin
+
+from . import constants
+from . import lib as flib
+from . import pipeline as fpipeline
+
 log = openpype.Logger.get_logger(__name__)
@@ -365,6 +360,7 @@ class PublishableClip:
     driving_layer_default = ""
     index_from_segment_default = False
     use_shot_name_default = False
+    include_handles_default = False

     def __init__(self, segment, **kwargs):
         self.rename_index = kwargs["rename_index"]
@@ -498,6 +494,8 @@ class PublishableClip:
             "reviewTrack", {}).get("value") or self.review_track_default
         self.audio = self.ui_inputs.get(
             "audio", {}).get("value") or False
+        self.include_handles = self.ui_inputs.get(
+            "includeHandles", {}).get("value") or self.include_handles_default

         # build subset name from layer name
         if self.subset_name == "[ track name ]":
@@ -660,8 +658,8 @@ class PublishableClip:


-# Publishing plugin functions
+# Loader plugin functions

-# Loader plugin functions
 class ClipLoader(LoaderPlugin):
     """A basic clip loader for Flame
@@ -681,50 +679,52 @@ class ClipLoader(LoaderPlugin):
     ]


-class OpenClipSolver:
-    media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info"
-    tmp_name = "_tmp.clip"
-    tmp_file = None
+class OpenClipSolver(flib.MediaInfoFile):
     create_new_clip = False

-    out_feed_nb_ticks = None
-    out_feed_fps = None
-    out_feed_drop_mode = None
-
     log = log

     def __init__(self, openclip_file_path, feed_data):
-        # test if media script path exists
-        self._validate_media_script_path()
+        self.out_file = openclip_file_path

         # new feed variables:
-        feed_path = feed_data["path"]
+        feed_path = feed_data.pop("path")
+
+        # initialize parent class
+        super(OpenClipSolver, self).__init__(
+            feed_path,
+            **feed_data
+        )
+
+        # get other metadata
         self.feed_version_name = feed_data["version"]
         self.feed_colorspace = feed_data.get("colorspace")
+        self.log.debug("feed_version_name: {}".format(self.feed_version_name))

-        if feed_data.get("logger"):
-            self.log = feed_data["logger"]
-
         # derive other feed variables
         self.feed_basename = os.path.basename(feed_path)
         self.feed_dir = os.path.dirname(feed_path)
         self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower()

-        if not os.path.isfile(openclip_file_path):
-            # openclip does not exist yet and will be created
-            self.tmp_file = self.out_file = openclip_file_path
+        self.log.debug("feed_ext: {}".format(self.feed_ext))
+        self.log.debug("out_file: {}".format(self.out_file))
+        if not self._is_valid_tmp_file(self.out_file):
             self.create_new_clip = True

-        else:
-            # output a temp file
-            self.out_file = openclip_file_path
-            self.tmp_file = os.path.join(self.feed_dir, self.tmp_name)
-            self._clear_tmp_file()
+    def _is_valid_tmp_file(self, file):
+        # check if file exists
+        if os.path.isfile(file):
+            # test also if file is not empty
+            with open(file) as f:
+                lines = f.readlines()

-        self.log.info("Temp File: {}".format(self.tmp_file))
+            if len(lines) > 2:
+                return True
+
+            # file is probably corrupted
+            os.remove(file)
+            return False

     def make(self):
-        self._generate_media_info_file()

         if self.create_new_clip:
             # New openClip
@@ -732,42 +732,17 @@ class OpenClipSolver:
         else:
             self._update_open_clip()

-    def _validate_media_script_path(self):
-        if not os.path.isfile(self.media_script_path):
-            raise IOError("Media Scirpt does not exist: `{}`".format(
-                self.media_script_path))
-
-    def _generate_media_info_file(self):
-        # Create cmd arguments for gettig xml file info file
-        cmd_args = [
-            self.media_script_path,
-            "-e", self.feed_ext,
-            "-o", self.tmp_file,
-            self.feed_dir
-        ]
-
-        # execute creation of clip xml template data
-        try:
-            openpype.run_subprocess(cmd_args)
-        except TypeError:
-            self.log.error("Error creating self.tmp_file")
-            six.reraise(*sys.exc_info())
-
-    def _clear_tmp_file(self):
-        if os.path.isfile(self.tmp_file):
-            os.remove(self.tmp_file)
-
     def _clear_handler(self, xml_object):
         for handler in xml_object.findall("./handler"):
-            self.log.debug("Handler found")
+            self.log.info("Handler found")
             xml_object.remove(handler)

     def _create_new_open_clip(self):
         self.log.info("Building new openClip")
+        self.log.debug(">> self.clip_data: {}".format(self.clip_data))

-        tmp_xml = ET.parse(self.tmp_file)
-
-        tmp_xml_feeds = tmp_xml.find('tracks/track/feeds')
+        # clip data coming from MediaInfoFile
+        tmp_xml_feeds = self.clip_data.find('tracks/track/feeds')
         tmp_xml_feeds.set('currentVersion', self.feed_version_name)
         for tmp_feed in tmp_xml_feeds:
             tmp_feed.set('vuid', self.feed_version_name)
@@ -778,46 +753,48 @@ class OpenClipSolver:

             self._clear_handler(tmp_feed)

-        tmp_xml_versions_obj = tmp_xml.find('versions')
+        tmp_xml_versions_obj = self.clip_data.find('versions')
         tmp_xml_versions_obj.set('currentVersion', self.feed_version_name)
         for xml_new_version in tmp_xml_versions_obj:
             xml_new_version.set('uid', self.feed_version_name)
             xml_new_version.set('type', 'version')

-        xml_data = self._fix_xml_data(tmp_xml)
+        self._clear_handler(self.clip_data)
         self.log.info("Adding feed version: {}".format(self.feed_basename))

-        self._write_result_xml_to_file(xml_data)
-
-        self.log.info("openClip Updated: {}".format(self.tmp_file))
+        self.write_clip_data_to_file(self.out_file, self.clip_data)

     def _update_open_clip(self):
         self.log.info("Updating openClip ..")

         out_xml = ET.parse(self.out_file)
-        tmp_xml = ET.parse(self.tmp_file)
+        out_xml = out_xml.getroot()

         self.log.debug(">> out_xml: {}".format(out_xml))
-        self.log.debug(">> tmp_xml: {}".format(tmp_xml))
+        self.log.debug(">> self.clip_data: {}".format(self.clip_data))

         # Get new feed from tmp file
-        tmp_xml_feed = tmp_xml.find('tracks/track/feeds/feed')
+        tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed')

         self._clear_handler(tmp_xml_feed)
-        self._get_time_info_from_origin(out_xml)

-        if self.out_feed_fps:
+        # update fps from MediaInfoFile class
+        if self.fps:
             tmp_feed_fps_obj = tmp_xml_feed.find(
                 "startTimecode/rate")
-            tmp_feed_fps_obj.text = self.out_feed_fps
-        if self.out_feed_nb_ticks:
+            tmp_feed_fps_obj.text = str(self.fps)
+
+        # update start_frame from MediaInfoFile class
+        if self.start_frame:
             tmp_feed_nb_ticks_obj = tmp_xml_feed.find(
                 "startTimecode/nbTicks")
-            tmp_feed_nb_ticks_obj.text = self.out_feed_nb_ticks
-        if self.out_feed_drop_mode:
+            tmp_feed_nb_ticks_obj.text = str(self.start_frame)
+
+        # update drop_mode from MediaInfoFile class
+        if self.drop_mode:
             tmp_feed_drop_mode_obj = tmp_xml_feed.find(
                 "startTimecode/dropMode")
-            tmp_feed_drop_mode_obj.text = self.out_feed_drop_mode
+            tmp_feed_drop_mode_obj.text = str(self.drop_mode)

         new_path_obj = tmp_xml_feed.find(
             "spans/span/path")
@@ -850,7 +827,7 @@ class OpenClipSolver:
             "version", {"type": "version", "uid": self.feed_version_name})
         out_xml_versions_obj.insert(0, new_version_obj)

-        xml_data = self._fix_xml_data(out_xml)
+        self._clear_handler(out_xml)

         # first create backup
         self._create_openclip_backup_file(self.out_file)
@@ -858,30 +835,9 @@ class OpenClipSolver:
         self.log.info("Adding feed version: {}".format(
             self.feed_version_name))

-        self._write_result_xml_to_file(xml_data)
+        self.write_clip_data_to_file(self.out_file, out_xml)

-        self.log.info("openClip Updated: {}".format(self.out_file))
-
-        self._clear_tmp_file()
-
-    def _get_time_info_from_origin(self, xml_data):
-        try:
-            for out_track in xml_data.iter('track'):
-                for out_feed in out_track.iter('feed'):
-                    out_feed_nb_ticks_obj = out_feed.find(
-                        'startTimecode/nbTicks')
-                    self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text
-                    out_feed_fps_obj = out_feed.find(
-                        'startTimecode/rate')
-                    self.out_feed_fps = out_feed_fps_obj.text
-                    out_feed_drop_mode_obj = out_feed.find(
-                        'startTimecode/dropMode')
-                    self.out_feed_drop_mode = out_feed_drop_mode_obj.text
-                    break
-                else:
-                    continue
-        except Exception as msg:
-            self.log.warning(msg)
+        self.log.debug("OpenClip Updated: {}".format(self.out_file))

     def _feed_exists(self, xml_data, path):
         # loop all available feed paths and check if
@@ -892,15 +848,6 @@ class OpenClipSolver:
                 "Not appending file as it already is in .clip file")
             return True

-    def _fix_xml_data(self, xml_data):
-        xml_root = xml_data.getroot()
-        self._clear_handler(xml_root)
-        return ET.tostring(xml_root).decode('utf-8')
-
-    def _write_result_xml_to_file(self, xml_data):
-        with open(self.out_file, "w") as f:
-            f.write(xml_data)
-
     def _create_openclip_backup_file(self, file):
         bck_file = "{}.bak".format(file)
         # if backup does not exist
@@ -929,6 +876,5 @@ class OpenClipSolver:
         if feed_clr_obj is not None:
             feed_clr_obj = ET.Element(
                 "colourSpace", {"type": "string"})
-            feed_clr_obj.text = profile_name
             feed_storage_obj.append(feed_clr_obj)

+        feed_clr_obj.text = profile_name
@@ -1,5 +1,8 @@
 import os
 from xml.etree import ElementTree as ET
+from openpype.api import Logger
+
+log = Logger.get_logger(__name__)


 def export_clip(export_path, clip, preset_path, **kwargs):

@@ -143,10 +146,40 @@ def modify_preset_file(xml_path, staging_dir, data):

     # change xml following data keys
     with open(xml_path, "r") as datafile:
-        tree = ET.parse(datafile)
+        _root = ET.parse(datafile)

     for key, value in data.items():
-        for element in tree.findall(".//{}".format(key)):
-            element.text = str(value)
-    tree.write(temp_path)
+        try:
+            if "/" in key:
+                if not key.startswith("./"):
+                    key = ".//" + key
+
+                split_key_path = key.split("/")
+                element_key = split_key_path[-1]
+                parent_obj_path = "/".join(split_key_path[:-1])
+
+                parent_obj = _root.find(parent_obj_path)
+                element_obj = parent_obj.find(element_key)
+                if not element_obj:
+                    append_element(parent_obj, element_key, value)
+            else:
+                finds = _root.findall(".//{}".format(key))
+                if not finds:
+                    raise AttributeError
+                for element in finds:
+                    element.text = str(value)
+        except AttributeError:
+            log.warning(
+                "Cannot create attribute: {}: {}. Skipping".format(
+                    key, value
+                ))
+    _root.write(temp_path)

     return temp_path


+def append_element(root_element_obj, key, value):
+    new_element_obj = ET.Element(key)
+    log.debug("__ new_element_obj: {}".format(new_element_obj))
+    new_element_obj.text = str(value)
+    root_element_obj.insert(0, new_element_obj)
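Note: the reworked `modify_preset_file` distinguishes plain keys from path keys; a sketch of the data mapping it accepts, with element names illustrative:

    data = {
        "frameRate": 25,            # replaces every .//frameRate element
        "video/width": 1920,        # resolved as .//video/width; the leaf
                                    # is created under <video> when missing
        "./name": "sh010_preview",  # explicit path from the document root
    }
    temp_preset = modify_preset_file(xml_path, staging_dir, data)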
@ -185,7 +185,9 @@ class WireTapCom(object):

        exit_code = subprocess.call(
            project_create_cmd,
            cwd=os.path.expanduser('~'))
            cwd=os.path.expanduser('~'),
            preexec_fn=_subprocess_preexec_fn
        )

        if exit_code != 0:
            raise RuntimeError("Cannot create project in flame db")

@ -254,7 +256,7 @@ class WireTapCom(object):
        filtered_users = [user for user in used_names if user_name in user]

        if filtered_users:
            # todo: need to find lastly created following regex pattern for
            # TODO: need to find lastly created following regex pattern for
            # date used in name
            return filtered_users.pop()

@ -448,7 +450,9 @@ class WireTapCom(object):

        exit_code = subprocess.call(
            project_colorspace_cmd,
            cwd=os.path.expanduser('~'))
            cwd=os.path.expanduser('~'),
            preexec_fn=_subprocess_preexec_fn
        )

        if exit_code != 0:
            raise RuntimeError("Cannot set colorspace {} on project {}".format(

@ -456,6 +460,15 @@ class WireTapCom(object):
        ))


def _subprocess_preexec_fn():
    """Helper function

    Setting permission mask to 0777
    """
    os.setpgrp()
    os.umask(0o000)


if __name__ == "__main__":
    # get json exchange data
    json_path = sys.argv[-1]

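# Illustrative sketch: preexec_fn runs in the child process between fork and
# exec, which is why it can adjust the process group and umask before the
# Wiretap tool starts. POSIX only; the command below is a harmless placeholder.
import os
import subprocess


def _preexec():
    # detach into an own process group and open up the file-creation mask
    os.setpgrp()
    os.umask(0o000)


exit_code = subprocess.call(
    ["ls", os.path.expanduser("~")],
    cwd=os.path.expanduser("~"),
    preexec_fn=_preexec
)
print("exit code:", exit_code)
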
@ -11,8 +11,6 @@ from . import utils
import flame
from pprint import pformat

reload(utils)  # noqa

log = logging.getLogger(__name__)

@ -96,83 +94,30 @@ def create_otio_time_range(start_frame, frame_duration, fps):

def _get_metadata(item):
    if hasattr(item, 'metadata'):
        if not item.metadata:
            return {}
        return {key: value for key, value in dict(item.metadata)}
        return dict(item.metadata) if item.metadata else {}
    return {}


def create_time_effects(otio_clip, item):
    # todo #2426: add retiming effects to export
    # get all subtrack items
    # subTrackItems = flatten(track_item.parent().subTrackItems())
    # speed = track_item.playbackSpeed()
def create_time_effects(otio_clip, speed):
    otio_effect = None

    # otio_effect = None
    # # retime on track item
    # if speed != 1.:
    #     # make effect
    #     otio_effect = otio.schema.LinearTimeWarp()
    #     otio_effect.name = "Speed"
    #     otio_effect.time_scalar = speed
    #     otio_effect.metadata = {}
    # retime on track item
    if speed != 1.:
        # make effect
        otio_effect = otio.schema.LinearTimeWarp()
        otio_effect.name = "Speed"
        otio_effect.time_scalar = speed
        otio_effect.metadata = {}

    # # freeze frame effect
    # if speed == 0.:
    #     otio_effect = otio.schema.FreezeFrame()
    #     otio_effect.name = "FreezeFrame"
    #     otio_effect.metadata = {}
    # freeze frame effect
    if speed == 0.:
        otio_effect = otio.schema.FreezeFrame()
        otio_effect.name = "FreezeFrame"
        otio_effect.metadata = {}

    # if otio_effect:
    #     # add otio effect to clip effects
    #     otio_clip.effects.append(otio_effect)

    # # loop through and get all Timewarps
    # for effect in subTrackItems:
    #     if ((track_item not in effect.linkedItems())
    #             and (len(effect.linkedItems()) > 0)):
    #         continue
    #     # avoid all effect which are not TimeWarp and disabled
    #     if "TimeWarp" not in effect.name():
    #         continue

    #     if not effect.isEnabled():
    #         continue

    #     node = effect.node()
    #     name = node["name"].value()

    #     # solve effect class as effect name
    #     _name = effect.name()
    #     if "_" in _name:
    #         effect_name = re.sub(r"(?:_)[_0-9]+", "", _name)  # more numbers
    #     else:
    #         effect_name = re.sub(r"\d+", "", _name)  # one number

    #     metadata = {}
    #     # add knob to metadata
    #     for knob in ["lookup", "length"]:
    #         value = node[knob].value()
    #         animated = node[knob].isAnimated()
    #         if animated:
    #             value = [
    #                 ((node[knob].getValueAt(i)) - i)
    #                 for i in range(
    #                     track_item.timelineIn(),
    #                     track_item.timelineOut() + 1)
    #             ]

    #         metadata[knob] = value

    #     # make effect
    #     otio_effect = otio.schema.TimeEffect()
    #     otio_effect.name = name
    #     otio_effect.effect_name = effect_name
    #     otio_effect.metadata = metadata

    #     # add otio effect to clip effects
    #     otio_clip.effects.append(otio_effect)
    pass
    if otio_effect:
        # add otio effect to clip effects
        otio_clip.effects.append(otio_effect)


def _get_marker_color(flame_colour):

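# Illustrative sketch of the effect mapping the rewritten create_time_effects
# performs, using the public opentimelineio schema (assumes the
# opentimelineio package is installed; clip construction is simplified).
import opentimelineio as otio


def time_effect_for_speed(speed):
    # a freeze frame is the special case of speed == 0
    if speed == 0.0:
        effect = otio.schema.FreezeFrame()
        effect.name = "FreezeFrame"
        return effect
    if speed != 1.0:
        effect = otio.schema.LinearTimeWarp()
        effect.name = "Speed"
        effect.time_scalar = speed
        return effect
    return None


clip = otio.schema.Clip(name="sh010_plate")
effect = time_effect_for_speed(0.5)
if effect is not None:
    clip.effects.append(effect)
print([e.name for e in clip.effects])
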
@ -260,31 +205,22 @@ def create_otio_markers(otio_item, item):
        otio_item.markers.append(otio_marker)


def create_otio_reference(clip_data):
def create_otio_reference(clip_data, fps=None):
    metadata = _get_metadata(clip_data)
    duration = int(clip_data["source_duration"])

    # get file info for path and start frame
    frame_start = 0
    fps = CTX.get_fps()
    fps = fps or CTX.get_fps()

    path = clip_data["fpath"]

    reel_clip = None
    match_reel_clip = [
        clip for clip in CTX.clips
        if clip["fpath"] == path
    ]
    if match_reel_clip:
        reel_clip = match_reel_clip.pop()
        fps = reel_clip["fps"]

    file_name = os.path.basename(path)
    file_head, extension = os.path.splitext(file_name)

    # get padding and other file infos
    log.debug("_ path: {}".format(path))

    frame_duration = clip_data["source_duration"]
    otio_ex_ref_item = None

    is_sequence = frame_number = utils.get_frame_from_filename(file_name)

@ -311,7 +247,7 @@ def create_otio_reference(clip_data):
        rate=fps,
        available_range=create_otio_time_range(
            frame_start,
            frame_duration,
            duration,
            fps
        )
    )

@ -327,7 +263,7 @@ def create_otio_reference(clip_data):
        target_url=reformated_path,
        available_range=create_otio_time_range(
            frame_start,
            frame_duration,
            duration,
            fps
        )
    )

@ -339,19 +275,84 @@ def create_otio_reference(clip_data):


def create_otio_clip(clip_data):
    from openpype.hosts.flame.api import MediaInfoFile, TimeEffectMetadata

    segment = clip_data["PySegment"]

    # create media reference
    media_reference = create_otio_reference(clip_data)

    # calculate source in
    first_frame = utils.get_frame_from_filename(clip_data["fpath"]) or 0
    source_in = int(clip_data["source_in"]) - int(first_frame)
    media_info = MediaInfoFile(clip_data["fpath"], logger=log)
    media_timecode_start = media_info.start_frame
    media_fps = media_info.fps

    # Timewarp metadata
    tw_data = TimeEffectMetadata(segment, logger=log).data
    log.debug("__ tw_data: {}".format(tw_data))

    # define first frame
    file_first_frame = utils.get_frame_from_filename(
        clip_data["fpath"])
    if file_first_frame:
        file_first_frame = int(file_first_frame)

    first_frame = media_timecode_start or file_first_frame or 0

    _clip_source_in = int(clip_data["source_in"])
    _clip_source_out = int(clip_data["source_out"])
    _clip_record_in = clip_data["record_in"]
    _clip_record_out = clip_data["record_out"]
    _clip_record_duration = int(clip_data["record_duration"])

    log.debug("_ file_first_frame: {}".format(file_first_frame))
    log.debug("_ first_frame: {}".format(first_frame))
    log.debug("_ _clip_source_in: {}".format(_clip_source_in))
    log.debug("_ _clip_source_out: {}".format(_clip_source_out))
    log.debug("_ _clip_record_in: {}".format(_clip_record_in))
    log.debug("_ _clip_record_out: {}".format(_clip_record_out))

    # first solve if the timing is reversed
    speed = 1
    if clip_data["source_in"] > clip_data["source_out"]:
        source_in = _clip_source_out - int(first_frame)
        source_out = _clip_source_in - int(first_frame)
        speed = -1
    else:
        source_in = _clip_source_in - int(first_frame)
        source_out = _clip_source_out - int(first_frame)

    log.debug("_ source_in: {}".format(source_in))
    log.debug("_ source_out: {}".format(source_out))

    if file_first_frame:
        log.debug("_ file_source_in: {}".format(
            file_first_frame + source_in))
        log.debug("_ file_source_out: {}".format(
            file_first_frame + source_out))

    source_duration = (source_out - source_in + 1)

    # secondly check if any change of speed
    if source_duration != _clip_record_duration:
        retime_speed = float(source_duration) / float(_clip_record_duration)
        log.debug("_ calculated speed: {}".format(retime_speed))
        speed *= retime_speed

    # get speed from metadata if available
    if tw_data.get("speed"):
        speed = tw_data["speed"]
        log.debug("_ metadata speed: {}".format(speed))

    log.debug("_ speed: {}".format(speed))
    log.debug("_ source_duration: {}".format(source_duration))
    log.debug("_ _clip_record_duration: {}".format(_clip_record_duration))

    # create media reference
    media_reference = create_otio_reference(
        clip_data, media_fps)

    # create source range
    source_range = create_otio_time_range(
        source_in,
        clip_data["record_duration"],
        _clip_record_duration,
        CTX.get_fps()
    )

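# Illustrative sketch: the retime detection above reduces to direction from
# the source in/out ordering and magnitude from the source/record duration
# ratio. Frame values below are invented.
def solve_speed(source_in, source_out, record_duration):
    speed = 1.0
    # a reversed source range means the clip plays backwards
    if source_in > source_out:
        source_in, source_out = source_out, source_in
        speed = -1.0

    source_duration = source_out - source_in + 1
    # any mismatch between source and record length is a retime
    if source_duration != record_duration:
        speed *= float(source_duration) / float(record_duration)
    return speed


print(solve_speed(1001, 1048, 48))  # 1.0 - no retime
print(solve_speed(1001, 1096, 48))  # 2.0 - twice as fast
print(solve_speed(1048, 1001, 48))  # -1.0 - reversed
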
@ -365,6 +366,9 @@ def create_otio_clip(clip_data):
    if MARKERS_INCLUDE:
        create_otio_markers(otio_clip, segment)

    if speed != 1:
        create_time_effects(otio_clip, speed)

    return otio_clip

@ -378,38 +382,6 @@ def create_otio_gap(gap_start, clip_start, tl_start_frame, fps):
    )


def get_clips_in_reels(project):
    output_clips = []
    project_desktop = project.current_workspace.desktop

    for reel_group in project_desktop.reel_groups:
        for reel in reel_group.reels:
            for clip in reel.clips:
                clip_data = {
                    "PyClip": clip,
                    "fps": float(str(clip.frame_rate)[:-4])
                }

                attrs = [
                    "name", "width", "height",
                    "ratio", "sample_rate", "bit_depth"
                ]

                for attr in attrs:
                    val = getattr(clip, attr)
                    clip_data[attr] = val

                version = clip.versions[-1]
                track = version.tracks[-1]
                for segment in track.segments:
                    segment_data = _get_segment_attributes(segment)
                    clip_data.update(segment_data)

                output_clips.append(clip_data)

    return output_clips


def _get_colourspace_policy():

    output = {}

@ -493,9 +465,6 @@ def _get_shot_tokens_values(clip, tokens):
    old_value = None
    output = {}

    if not clip.shot_name:
        return output

    old_value = clip.shot_name.get_value()

    for token in tokens:

@ -513,15 +482,21 @@ def _get_shot_tokens_values(clip, tokens):


def _get_segment_attributes(segment):
    # log.debug(dir(segment))

    if str(segment.name)[1:-1] == "":
    log.debug("Segment name|hidden: {}|{}".format(
        segment.name.get_value(), segment.hidden
    ))
    if (
        segment.name.get_value() == ""
        or segment.hidden.get_value()
    ):
        return None

    # Add timeline segment to tree
    clip_data = {
        "segment_name": segment.name.get_value(),
        "segment_comment": segment.comment.get_value(),
        "shot_name": segment.shot_name.get_value(),
        "tape_name": segment.tape_name,
        "source_name": segment.source_name,
        "fpath": segment.file_path,

@ -529,9 +504,10 @@ def _get_segment_attributes(segment):
    }

    # add all available shot tokens
    shot_tokens = _get_shot_tokens_values(segment, [
        "<colour space>", "<width>", "<height>", "<depth>",
    ])
    shot_tokens = _get_shot_tokens_values(
        segment,
        ["<colour space>", "<width>", "<height>", "<depth>"]
    )
    clip_data.update(shot_tokens)

    # populate shot source metadata

@ -561,11 +537,6 @@ def create_otio_timeline(sequence):
    log.info(sequence.attributes)

    CTX.project = get_current_flame_project()
    CTX.clips = get_clips_in_reels(CTX.project)

    log.debug(pformat(
        CTX.clips
    ))

    # get current timeline
    CTX.set_fps(

@ -583,8 +554,13 @@ def create_otio_timeline(sequence):
    # create otio tracks and clips
    for ver in sequence.versions:
        for track in ver.tracks:
            if len(track.segments) == 0 and track.hidden:
                return None
            # avoid all empty tracks
            # or hidden tracks
            if (
                len(track.segments) == 0
                or track.hidden.get_value()
            ):
                continue

            # convert track to otio
            otio_track = create_otio_track(

@ -597,11 +573,7 @@ def create_otio_timeline(sequence):
                continue
            all_segments.append(clip_data)

        segments_ordered = {
            itemindex: clip_data
            for itemindex, clip_data in enumerate(
                all_segments)
        }
        segments_ordered = dict(enumerate(all_segments))
        log.debug("_ segments_ordered: {}".format(
            pformat(segments_ordered)
        ))

@ -612,15 +584,11 @@ def create_otio_timeline(sequence):
            log.debug("_ itemindex: {}".format(itemindex))

            # Add Gap if needed
            if itemindex == 0:
                # if it is first track item at track then add
                # it to previous item
                prev_item = segment_data

            else:
                # get previous item
                prev_item = segments_ordered[itemindex - 1]

            prev_item = (
                segment_data
                if itemindex == 0
                else segments_ordered[itemindex - 1]
            )
            log.debug("_ segment_data: {}".format(segment_data))

            # calculate clip frame range difference from each other

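# Illustrative sketch: ordering segments and resolving the previous item as
# in the loop above; the first item acts as its own predecessor. Segment
# dicts here are invented stand-ins for the collected clip data.
all_segments = [{"name": "sh010"}, {"name": "sh020"}, {"name": "sh030"}]
segments_ordered = dict(enumerate(all_segments))

for itemindex, segment_data in segments_ordered.items():
    prev_item = (
        segment_data
        if itemindex == 0
        else segments_ordered[itemindex - 1]
    )
    print(segment_data["name"], "previous:", prev_item["name"])
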
@ -268,6 +268,14 @@ class CreateShotClip(opfapi.Creator):
                "target": "tag",
                "toolTip": "Handle at end of clip",  # noqa
                "order": 2
            },
            "includeHandles": {
                "value": False,
                "type": "QCheckBox",
                "label": "Include handles",
                "target": "tag",
                "toolTip": "By default handles are excluded",  # noqa
                "order": 3
            }
        }
    }

@ -2,7 +2,7 @@ import os
import flame
from pprint import pformat
import openpype.hosts.flame.api as opfapi

from openpype.lib import StringTemplate

class LoadClip(opfapi.ClipLoader):
    """Load a subset to timeline as clip

@ -22,7 +22,7 @@ class LoadClip(opfapi.ClipLoader):
    # settings
    reel_group_name = "OpenPype_Reels"
    reel_name = "Loaded"
    clip_name_template = "{asset}_{subset}_{representation}"
    clip_name_template = "{asset}_{subset}<_{output}>"

    def load(self, context, name, namespace, options):

@ -36,10 +36,10 @@ class LoadClip(opfapi.ClipLoader):
        version_data = version.get("data", {})
        version_name = version.get("name", None)
        colorspace = version_data.get("colorspace", None)
        clip_name = self.clip_name_template.format(
            **context["representation"]["context"])
        clip_name = StringTemplate(self.clip_name_template).format(
            context["representation"]["context"])

        # todo: settings in imageio
        # TODO: settings in imageio
        # convert colorspace with ocio to flame mapping
        # in imageio flame section
        colorspace = colorspace

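# Illustrative sketch: the switch from str.format to StringTemplate is what
# makes the <_{output}> part optional. Assuming openpype.lib.StringTemplate
# behaves as in OpenPype's anatomy templates, an angle-bracketed group is
# dropped when its keys are missing instead of raising KeyError.
from openpype.lib import StringTemplate

template = StringTemplate("{asset}_{subset}<_{output}>")

# "output" present: the optional group is rendered
print(template.format(
    {"asset": "sh010", "subset": "plateMain", "output": "exr"}))

# "output" missing: the optional group is dropped
print(template.format({"asset": "sh010", "subset": "plateMain"}))
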
140
openpype/hosts/flame/plugins/load/load_clip_batch.py
Normal file

@ -0,0 +1,140 @@
import os
import flame
from pprint import pformat
import openpype.hosts.flame.api as opfapi
from openpype.lib import StringTemplate


class LoadClipBatch(opfapi.ClipLoader):
    """Load a subset to timeline as clip

    Place clip to timeline on its asset origin timings collected
    during conforming to project
    """

    families = ["render2d", "source", "plate", "render", "review"]
    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264"]

    label = "Load as clip to current batch"
    order = -10
    icon = "code-fork"
    color = "orange"

    # settings
    reel_name = "OP_LoadedReel"
    clip_name_template = "{asset}_{subset}<_{output}>"

    def load(self, context, name, namespace, options):

        # get flame objects
        self.batch = options.get("batch") or flame.batch

        # load clip to timeline and get main variables
        namespace = namespace
        version = context['version']
        version_data = version.get("data", {})
        version_name = version.get("name", None)
        colorspace = version_data.get("colorspace", None)

        # in case output is not in context replace key to representation
        if not context["representation"]["context"].get("output"):
            self.clip_name_template = self.clip_name_template.replace(
                "output", "representation")

        clip_name = StringTemplate(self.clip_name_template).format(
            context["representation"]["context"])

        # TODO: settings in imageio
        # convert colorspace with ocio to flame mapping
        # in imageio flame section
        colorspace = colorspace

        # create workfile path
        workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"]
        openclip_dir = os.path.join(
            workfile_dir, clip_name
        )
        openclip_path = os.path.join(
            openclip_dir, clip_name + ".clip"
        )
        if not os.path.exists(openclip_dir):
            os.makedirs(openclip_dir)

        # prepare clip data from context and send it to openClipLoader
        loading_context = {
            "path": self.fname.replace("\\", "/"),
            "colorspace": colorspace,
            "version": "v{:0>3}".format(version_name),
            "logger": self.log
        }
        self.log.debug(pformat(
            loading_context
        ))
        self.log.debug(openclip_path)

        # make openpype clip file
        opfapi.OpenClipSolver(openclip_path, loading_context).make()

        # prepare Reel group in actual desktop
        opc = self._get_clip(
            clip_name,
            openclip_path
        )

        # add additional metadata from the version to imprint Avalon knob
        add_keys = [
            "frameStart", "frameEnd", "source", "author",
            "fps", "handleStart", "handleEnd"
        ]

        # move all version data keys to tag data
        data_imprint = {
            key: version_data.get(key, str(None))
            for key in add_keys
        }
        # add variables related to version context
        data_imprint.update({
            "version": version_name,
            "colorspace": colorspace,
            "objectName": clip_name
        })

        # TODO: finish the containerisation
        # opc_segment = opfapi.get_clip_segment(opc)

        # return opfapi.containerise(
        #     opc_segment,
        #     name, namespace, context,
        #     self.__class__.__name__,
        #     data_imprint)

        return opc

    def _get_clip(self, name, clip_path):
        reel = self._get_reel()

        # with maintained openclip as opc
        matching_clip = None
        for cl in reel.clips:
            if cl.name.get_value() != name:
                continue
            matching_clip = cl

        if not matching_clip:
            created_clips = flame.import_clips(str(clip_path), reel)
            return created_clips.pop()

        return matching_clip

    def _get_reel(self):

        matching_reel = [
            rg for rg in self.batch.reels
            if rg.name.get_value() == self.reel_name
        ]

        return (
            matching_reel.pop()
            if matching_reel
            else self.batch.create_reel(str(self.reel_name))
        )

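# Illustrative sketch: both helpers above use the same get-or-create idiom
# against Flame's named collections. The Item class is a made-up stand-in
# for flame's reel and clip objects.
class Item(object):
    def __init__(self, name):
        self.name = name


def get_or_create(collection, name, factory):
    # reuse an existing item by name, otherwise build and register a new one
    for item in collection:
        if item.name == name:
            return item
    item = factory(name)
    collection.append(item)
    return item


reels = [Item("Loaded")]
reel = get_or_create(reels, "OP_LoadedReel", Item)
print(reel.name, len(reels))  # OP_LoadedReel 2
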
@ -1,8 +1,12 @@
import re
from types import NoneType
import pyblish
import openpype
import openpype.hosts.flame.api as opfapi
from openpype.hosts.flame.otio import flame_export
from openpype.pipeline.editorial import (
    is_overlapping_otio_ranges,
    get_media_range_with_retimes
)

# # developer reload modules
from pprint import pformat

@ -21,30 +25,22 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):

    audio_track_items = []

    # TODO: add to settings
    # settings
    xml_preset_attrs_from_comments = {
        "width": "number",
        "height": "number",
        "pixelRatio": "float",
        "resizeType": "string",
        "resizeFilter": "string"
    }
    xml_preset_attrs_from_comments = []
    add_tasks = []

    def process(self, context):
        project = context.data["flameProject"]
        sequence = context.data["flameSequence"]
        selected_segments = context.data["flameSelectedSegments"]
        self.log.debug("__ selected_segments: {}".format(selected_segments))

        self.otio_timeline = context.data["otioTimeline"]
        self.clips_in_reels = opfapi.get_clips_in_reels(project)
        self.fps = context.data["fps"]

        # process all selected
        for segment in selected_segments:
            # get openpype tag data
            marker_data = opfapi.get_segment_data_marker(segment)

            self.log.debug("__ marker_data: {}".format(
                pformat(marker_data)))

@ -67,15 +63,33 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
            clip_name = clip_data["segment_name"]
            self.log.debug("clip_name: {}".format(clip_name))

            # get otio clip data
            otio_data = self._get_otio_clip_instance_data(clip_data) or {}
            self.log.debug("__ otio_data: {}".format(pformat(otio_data)))

            # get file path
            file_path = clip_data["fpath"]

            # get source clip
            source_clip = self._get_reel_clip(file_path)

            first_frame = opfapi.get_frame_from_filename(file_path) or 0

            head, tail = self._get_head_tail(clip_data, first_frame)
            head, tail = self._get_head_tail(
                clip_data,
                otio_data["otioClip"],
                marker_data["handleStart"],
                marker_data["handleEnd"]
            )

            # make sure there is no NoneType, rather 0
            if isinstance(head, NoneType):
                head = 0
            if isinstance(tail, NoneType):
                tail = 0

            # make sure value is absolute
            if head != 0:
                head = abs(head)
            if tail != 0:
                tail = abs(tail)

            # solve handles length
            marker_data["handleStart"] = min(

@ -83,11 +97,16 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
            marker_data["handleEnd"] = min(
                marker_data["handleEnd"], tail)

            workfile_start = self._set_workfile_start(marker_data)

            with_audio = bool(marker_data.pop("audio"))

            # add marker data to instance data
            inst_data = dict(marker_data.items())

            # add otio_data to instance data
            inst_data.update(otio_data)

            asset = marker_data["asset"]
            subset = marker_data["subset"]

@ -110,17 +129,16 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
                "families": families,
                "publish": marker_data["publish"],
                "fps": self.fps,
                "flameSourceClip": source_clip,
                "workfileFrameStart": workfile_start,
                "sourceFirstFrame": int(first_frame),
                "path": file_path
                "path": file_path,
                "flameAddTasks": self.add_tasks,
                "tasks": {
                    task["name"]: {"type": task["type"]}
                    for task in self.add_tasks},
                "representations": [],
                "newAssetPublishing": True
            })

            # get otio clip data
            otio_data = self._get_otio_clip_instance_data(clip_data) or {}
            self.log.debug("__ otio_data: {}".format(pformat(otio_data)))

            # add to instance data
            inst_data.update(otio_data)
            self.log.debug("__ inst_data: {}".format(pformat(inst_data)))

            # add resolution

@ -154,6 +172,17 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
        if marker_data.get("reviewTrack") is not None:
            instance.data["reviewAudio"] = True

    @staticmethod
    def _set_workfile_start(data):
        include_handles = data.get("includeHandles")
        workfile_start = data["workfileFrameStart"]
        handle_start = data["handleStart"]

        if include_handles:
            workfile_start += handle_start

        return workfile_start

    def _get_comment_attributes(self, segment):
        comment = segment.comment.get_value()

@ -187,7 +216,10 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
            # split to key and value
            key, value = split.split(":")

            for a_name, a_type in self.xml_preset_attrs_from_comments.items():
            for attr_data in self.xml_preset_attrs_from_comments:
                a_name = attr_data["name"]
                a_type = attr_data["type"]

                # exclude all not related attributes
                if a_name.lower() not in key.lower():
                    continue

@ -242,29 +274,26 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):

        return split_comments

    def _get_head_tail(self, clip_data, first_frame):
    def _get_head_tail(self, clip_data, otio_clip, handle_start, handle_end):
        # calculate head and tail with forward compatibility
        head = clip_data.get("segment_head")
        tail = clip_data.get("segment_tail")
        self.log.debug("__ head: `{}`".format(head))
        self.log.debug("__ tail: `{}`".format(tail))

        # HACK: it is here to serve for versions below 2021.1
        if not any([head, tail]):
            retimed_attributes = get_media_range_with_retimes(
                otio_clip, handle_start, handle_end)
            self.log.debug(
                ">> retimed_attributes: {}".format(retimed_attributes))

            # retimed head and tail
            head = int(retimed_attributes["handleStart"])
            tail = int(retimed_attributes["handleEnd"])

        if not head:
            head = int(clip_data["source_in"]) - int(first_frame)
        if not tail:
            tail = int(
                clip_data["source_duration"] - (
                    head + clip_data["record_duration"]
                )
            )
        return head, tail

    def _get_reel_clip(self, path):
        match_reel_clip = [
            clip for clip in self.clips_in_reels
            if clip["fpath"] == path
        ]
        if match_reel_clip:
            return match_reel_clip.pop()

    def _get_resolution_to_data(self, data, context):
        assert data.get("otioClip"), "Missing `otioClip` data"

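# Illustrative sketch: after _get_head_tail the collector normalizes the
# values (None becomes 0, negatives are folded to absolute) and clamps the
# requested handles against them; the min() calls above do the clamping.
def normalize_handles(head, tail, handle_start, handle_end):
    head = abs(head) if head else 0
    tail = abs(tail) if tail else 0
    # never promise more handle frames than the media actually has
    return min(handle_start, head), min(handle_end, tail)


print(normalize_handles(None, -12, 10, 10))  # (0, 10)
print(normalize_handles(5, 25, 10, 10))      # (5, 10)
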
@ -353,7 +382,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
                continue
            if otio_clip.name not in segment.name.get_value():
                continue
            if openpype.lib.is_overlapping_otio_ranges(
            if is_overlapping_otio_ranges(
                    parent_range, timeline_range, strict=True):

                # add pypedata marker to otio_clip metadata

@ -1,8 +1,9 @@
import pyblish.api
import avalon.api as avalon
import openpype.lib as oplib

import openpype.hosts.flame.api as opfapi
from openpype.hosts.flame.otio import flame_export
from openpype.pipeline import legacy_io
from openpype.pipeline.create import get_subset_name


class CollecTimelineOTIO(pyblish.api.ContextPlugin):

@ -18,16 +19,19 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):

        # main
        asset_doc = context.data["assetEntity"]
        task_name = avalon.Session["AVALON_TASK"]
        task_name = legacy_io.Session["AVALON_TASK"]
        project = opfapi.get_current_project()
        sequence = opfapi.get_current_sequence(opfapi.CTX.selection)

        # create subset name
        subset_name = oplib.get_subset_name_with_asset_doc(
        subset_name = get_subset_name(
            family,
            variant,
            task_name,
            asset_doc,
            context.data["projectName"],
            context.data["hostName"],
            project_settings=context.data["project_settings"]
        )

        # adding otio timeline to context

@ -38,7 +42,8 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
            "name": subset_name,
            "asset": asset_doc["name"],
            "subset": subset_name,
            "family": "workfile"
            "family": "workfile",
            "families": []
        }

        # create instance with workfile

@ -1,10 +1,18 @@
import os
import re
import tempfile
from pprint import pformat
from copy import deepcopy

import pyblish.api
import openpype.api
from openpype.hosts.flame import api as opfapi
from openpype.hosts.flame.api import MediaInfoFile
from openpype.pipeline.editorial import (
    get_media_range_with_retimes
)

import flame


class ExtractSubsetResources(openpype.api.Extractor):

@ -18,32 +26,22 @@ class ExtractSubsetResources(openpype.api.Extractor):
    hosts = ["flame"]

    # plugin defaults
    keep_original_representation = False

    default_presets = {
        "thumbnail": {
            "active": True,
            "ext": "jpg",
            "xml_preset_file": "Jpeg (8-bit).xml",
            "xml_preset_dir": "",
            "export_type": "File Sequence",
            "ignore_comment_attrs": True,
            "parsed_comment_attrs": False,
            "colorspace_out": "Output - sRGB",
            "representation_add_range": False,
            "representation_tags": ["thumbnail"]
        },
        "ftrackpreview": {
            "ext": "mov",
            "xml_preset_file": "Apple iPad (1920x1080).xml",
            "xml_preset_dir": "",
            "export_type": "Movie",
            "ignore_comment_attrs": True,
            "colorspace_out": "Output - Rec.709",
            "representation_add_range": True,
            "representation_tags": [
                "review",
                "delete"
            ]
            "representation_tags": ["thumbnail"],
            "path_regex": ".*"
        }
    }
    keep_original_representation = False

    # hide publisher during exporting
    hide_ui_on_process = True

@ -52,18 +50,16 @@ class ExtractSubsetResources(openpype.api.Extractor):
    export_presets_mapping = {}

    def process(self, instance):
        if (
            self.keep_original_representation
            and "representations" not in instance.data
            or not self.keep_original_representation
        ):
        if not self.keep_original_representation:
            # remove previous representation if not needed
            instance.data["representations"] = []

        # flame objects
        segment = instance.data["item"]
        asset_name = instance.data["asset"]
        segment_name = segment.name.get_value()
        clip_path = instance.data["path"]
        sequence_clip = instance.context.data["flameSequence"]
        clip_data = instance.data["flameSourceClip"]
        clip = clip_data["PyClip"]

        # segment's parent track name
        s_track_name = segment.parent.name.get_value()

@ -73,19 +69,60 @@ class ExtractSubsetResources(openpype.api.Extractor):
        # get media source first frame
        source_first_frame = instance.data["sourceFirstFrame"]

        self.log.debug("_ frame_start: {}".format(frame_start))
        self.log.debug("_ source_first_frame: {}".format(source_first_frame))

        # get timeline in/out of segment
        clip_in = instance.data["clipIn"]
        clip_out = instance.data["clipOut"]

        # get retimed attributes
        retimed_data = self._get_retimed_attributes(instance)

        # get individual keys
        r_handle_start = retimed_data["handle_start"]
        r_handle_end = retimed_data["handle_end"]
        r_source_dur = retimed_data["source_duration"]
        r_speed = retimed_data["speed"]

        # get handles value - take only the max from both
        handle_start = instance.data["handleStart"]
        handle_end = instance.data["handleStart"]
        handle_end = instance.data["handleEnd"]
        handles = max(handle_start, handle_end)
        include_handles = instance.data.get("includeHandles")

        # get media source range with handles
        source_end_handles = instance.data["sourceEndH"]
        source_start_handles = instance.data["sourceStartH"]
        source_end_handles = instance.data["sourceEndH"]
        # retime if needed
        if r_speed != 1.0:
            source_start_handles = (
                instance.data["sourceStart"] - r_handle_start)
            source_end_handles = (
                source_start_handles
                + (r_source_dur - 1)
                + r_handle_start
                + r_handle_end
            )

        # get frame range with handles for representation range
        frame_start_handle = frame_start - handle_start
        repre_frame_start = frame_start_handle
        if include_handles:
            if r_speed == 1.0:
                frame_start_handle = frame_start
            else:
                frame_start_handle = (
                    frame_start - handle_start) + r_handle_start

        self.log.debug("_ frame_start_handle: {}".format(
            frame_start_handle))
        self.log.debug("_ repre_frame_start: {}".format(
            repre_frame_start))

        # calculate duration with handles
        source_duration_handles = (
            source_end_handles - source_start_handles) + 1

        # create staging dir path
        staging_dir = self.staging_dir(instance)

|
|||
# add default preset type for thumbnail and reviewable video
|
||||
# update them with settings and override in case the same
|
||||
# are found in there
|
||||
export_presets = deepcopy(self.default_presets)
|
||||
_preset_keys = [k.split('_')[0] for k in self.export_presets_mapping]
|
||||
export_presets = {
|
||||
k: v for k, v in deepcopy(self.default_presets).items()
|
||||
if k not in _preset_keys
|
||||
}
|
||||
export_presets.update(self.export_presets_mapping)
|
||||
|
||||
if not instance.data.get("versionData"):
|
||||
instance.data["versionData"] = {}
|
||||
|
||||
# set versiondata if any retime
|
||||
version_data = retimed_data.get("version_data")
|
||||
self.log.debug("_ version_data: {}".format(version_data))
|
||||
|
||||
if version_data:
|
||||
instance.data["versionData"].update(version_data)
|
||||
|
||||
if r_speed != 1.0:
|
||||
instance.data["versionData"].update({
|
||||
"frameStart": frame_start_handle,
|
||||
"frameEnd": (
|
||||
(frame_start_handle + source_duration_handles - 1)
|
||||
- (r_handle_start + r_handle_end)
|
||||
)
|
||||
})
|
||||
self.log.debug("_ i_version_data: {}".format(
|
||||
instance.data["versionData"]
|
||||
))
|
||||
|
||||
# loop all preset names and
|
||||
for unique_name, preset_config in export_presets.items():
|
||||
modify_xml_data = {}
|
||||
|
||||
if self._should_skip(preset_config, clip_path, unique_name):
|
||||
continue
|
||||
|
||||
# get all presets attributes
|
||||
extension = preset_config["ext"]
|
||||
preset_file = preset_config["xml_preset_file"]
|
||||
preset_dir = preset_config["xml_preset_dir"]
|
||||
export_type = preset_config["export_type"]
|
||||
repre_tags = preset_config["representation_tags"]
|
||||
ignore_comment_attrs = preset_config["ignore_comment_attrs"]
|
||||
parsed_comment_attrs = preset_config["parsed_comment_attrs"]
|
||||
color_out = preset_config["colorspace_out"]
|
||||
|
||||
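# Illustrative sketch: the merge above keeps a built-in default only when no
# settings preset overrides its base name (the part before the first
# underscore). Preset contents are trimmed down for the example.
from copy import deepcopy

default_presets = {
    "thumbnail": {"ext": "jpg"},
    "ftrackpreview": {"ext": "mov"},
}
export_presets_mapping = {
    "thumbnail_custom": {"ext": "png"},
}

_preset_keys = [k.split("_")[0] for k in export_presets_mapping]
export_presets = {
    k: v for k, v in deepcopy(default_presets).items()
    if k not in _preset_keys
}
export_presets.update(export_presets_mapping)
print(sorted(export_presets))  # ['ftrackpreview', 'thumbnail_custom']
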
            # get frame range with handles for representation range
            frame_start_handle = frame_start - handle_start
            source_duration_handles = (
                source_end_handles - source_start_handles) + 1
            self.log.info(
                "Processing `{}` as `{}` to `{}` type...".format(
                    preset_file, export_type, extension
                )
            )

            # define in/out marks
            in_mark = (source_start_handles - source_first_frame) + 1
            out_mark = in_mark + source_duration_handles

            # by default export source clips
            exporting_clip = clip
            exporting_clip = None
            name_patern_xml = "<name>_{}.".format(
                unique_name)

            if export_type == "Sequence Publish":
                # change export clip to sequence
                exporting_clip = sequence_clip
                exporting_clip = flame.duplicate(sequence_clip)

                # change in/out marks to timeline in/out
                # only keep visible layer where instance segment is child
                self.hide_others(
                    exporting_clip, segment_name, s_track_name)

                # change name pattern
                name_patern_xml = (
                    "<segment name>_<shot name>_{}.").format(
                        unique_name)

                # only for h264 with baked retime
                in_mark = clip_in
                out_mark = clip_out

                # add xml tags modifications
                out_mark = clip_out + 1
                modify_xml_data.update({
                    "exportHandles": True,
                    "nbHandles": handles,
                    "startFrame": frame_start
                    "nbHandles": handles
                })
            else:
                in_mark = (source_start_handles - source_first_frame) + 1
                out_mark = in_mark + source_duration_handles
                exporting_clip = self.import_clip(clip_path)
                exporting_clip.name.set_value("{}_{}".format(
                    asset_name, segment_name))

            if not ignore_comment_attrs:
                # add any xml overrides collected from segment.comment
                modify_xml_data.update(instance.data["xml_overrides"])
            # add xml tags modifications
            modify_xml_data.update({
                # enum position low start from 0
                "frameIndex": 0,
                "startFrame": repre_frame_start,
                "namePattern": name_patern_xml
            })

            self.log.debug("__ modify_xml_data: {}".format(pformat(
                modify_xml_data
            )))
            if parsed_comment_attrs:
                # add any xml overrides collected from segment.comment
                modify_xml_data.update(instance.data["xml_overrides"])

            # with maintained duplication loop all presets
            with opfapi.maintained_object_duplication(
                    exporting_clip) as duplclip:
                kwargs = {}
            self.log.debug("_ in_mark: {}".format(in_mark))
            self.log.debug("_ out_mark: {}".format(out_mark))

                if export_type == "Sequence Publish":
                    # only keep visible layer where instance segment is child
                    self.hide_other_tracks(duplclip, s_track_name)
            export_kwargs = {}
            # validate xml preset file is filled
            if preset_file == "":
                raise ValueError(
                    ("Check Settings for {} preset: "
                     "`XML preset file` is not filled").format(
                        unique_name)
                )

                # validate xml preset file is filled
                if preset_file == "":
            # resolve xml preset dir if not filled
            if preset_dir == "":
                preset_dir = opfapi.get_preset_path_by_xml_name(
                    preset_file)

                if not preset_dir:
                    raise ValueError(
                        ("Check Settings for {} preset: "
                         "`XML preset file` is not filled").format(
                            unique_name)
                         "`XML preset file` {} is not found").format(
                            unique_name, preset_file)
                    )

                # resolve xml preset dir if not filled
                if preset_dir == "":
                    preset_dir = opfapi.get_preset_path_by_xml_name(
                        preset_file)
            # create preset path
            preset_orig_xml_path = str(os.path.join(
                preset_dir, preset_file
            ))

                if not preset_dir:
                    raise ValueError(
                        ("Check Settings for {} preset: "
                         "`XML preset file` {} is not found").format(
                            unique_name, preset_file)
                    )
            # define kwargs based on preset type
            if "thumbnail" in unique_name:
                modify_xml_data.update({
                    "video/posterFrame": True,
                    "video/useFrameAsPoster": 1,
                    "namePattern": "__thumbnail"
                })
                thumb_frame_number = int(in_mark + (
                    (out_mark - in_mark + 1) / 2))

                # create preset path
                preset_orig_xml_path = str(os.path.join(
                    preset_dir, preset_file
                self.log.debug("__ thumb_frame_number: {}".format(
                    thumb_frame_number
                ))

                preset_path = opfapi.modify_preset_file(
                    preset_orig_xml_path, staging_dir, modify_xml_data)
                export_kwargs["thumb_frame_number"] = thumb_frame_number
            else:
                export_kwargs.update({
                    "in_mark": in_mark,
                    "out_mark": out_mark
                })

                # define kwargs based on preset type
                if "thumbnail" in unique_name:
                    kwargs["thumb_frame_number"] = in_mark + (
                        source_duration_handles / 2)
                else:
                    kwargs.update({
                        "in_mark": in_mark,
                        "out_mark": out_mark
                    })
            preset_path = opfapi.modify_preset_file(
                preset_orig_xml_path, staging_dir, modify_xml_data)

|
||||
export_dir_path = str(os.path.join(
|
||||
staging_dir, unique_name
|
||||
))
|
||||
os.makedirs(export_dir_path)
|
||||
# get and make export dir paths
|
||||
export_dir_path = str(os.path.join(
|
||||
staging_dir, unique_name
|
||||
))
|
||||
os.makedirs(export_dir_path)
|
||||
|
||||
# export
|
||||
opfapi.export_clip(
|
||||
export_dir_path, duplclip, preset_path, **kwargs)
|
||||
# export
|
||||
opfapi.export_clip(
|
||||
export_dir_path, exporting_clip, preset_path, **export_kwargs)
|
||||
|
||||
extension = preset_config["ext"]
|
||||
repr_name = unique_name
|
||||
# make sure only first segment is used if underscore in name
|
||||
# HACK: `ftrackreview_withLUT` will result only in `ftrackreview`
|
||||
if (
|
||||
"thumbnail" in unique_name
|
||||
or "ftrackreview" in unique_name
|
||||
):
|
||||
repr_name = unique_name.split("_")[0]
|
||||
|
||||
# create representation data
|
||||
representation_data = {
|
||||
"name": unique_name,
|
||||
"outputName": unique_name,
|
||||
"ext": extension,
|
||||
"stagingDir": export_dir_path,
|
||||
"tags": repre_tags,
|
||||
"data": {
|
||||
"colorspace": color_out
|
||||
}
|
||||
}
|
||||
# create representation data
|
||||
representation_data = {
|
||||
"name": repr_name,
|
||||
"outputName": repr_name,
|
||||
"ext": extension,
|
||||
"stagingDir": export_dir_path,
|
||||
"tags": repre_tags,
|
||||
"data": {
|
||||
"colorspace": color_out
|
||||
},
|
||||
"load_to_batch_group": preset_config.get(
|
||||
"load_to_batch_group"),
|
||||
"batch_group_loader_name": preset_config.get(
|
||||
"batch_group_loader_name") or None
|
||||
}
|
||||
|
||||
# collect all available content of export dir
|
||||
files = os.listdir(export_dir_path)
|
||||
# collect all available content of export dir
|
||||
files = os.listdir(export_dir_path)
|
||||
|
||||
# make sure no nested folders inside
|
||||
n_stage_dir, n_files = self._unfolds_nested_folders(
|
||||
export_dir_path, files, extension)
|
||||
# make sure no nested folders inside
|
||||
n_stage_dir, n_files = self._unfolds_nested_folders(
|
||||
export_dir_path, files, extension)
|
||||
|
||||
# fix representation in case of nested folders
|
||||
if n_stage_dir:
|
||||
representation_data["stagingDir"] = n_stage_dir
|
||||
files = n_files
|
||||
# fix representation in case of nested folders
|
||||
if n_stage_dir:
|
||||
representation_data["stagingDir"] = n_stage_dir
|
||||
files = n_files
|
||||
|
||||
# add files to represetation but add
|
||||
# imagesequence as list
|
||||
if (
|
||||
# first check if path in files is not mov extension
|
||||
[
|
||||
f for f in files
|
||||
if os.path.splitext(f)[-1] == ".mov"
|
||||
]
|
||||
# then try if thumbnail is not in unique name
|
||||
or unique_name == "thumbnail"
|
||||
):
|
||||
representation_data["files"] = files.pop()
|
||||
else:
|
||||
representation_data["files"] = files
|
||||
# add files to represetation but add
|
||||
# imagesequence as list
|
||||
if (
|
||||
# first check if path in files is not mov extension
|
||||
[
|
||||
f for f in files
|
||||
if os.path.splitext(f)[-1] == ".mov"
|
||||
]
|
||||
# then try if thumbnail is not in unique name
|
||||
or repr_name == "thumbnail"
|
||||
):
|
||||
representation_data["files"] = files.pop()
|
||||
else:
|
||||
representation_data["files"] = files
|
||||
|
||||
# add frame range
|
||||
if preset_config["representation_add_range"]:
|
||||
representation_data.update({
|
||||
"frameStart": frame_start_handle,
|
||||
"frameEnd": (
|
||||
frame_start_handle + source_duration_handles),
|
||||
"fps": instance.data["fps"]
|
||||
})
|
||||
# add frame range
|
||||
if preset_config["representation_add_range"]:
|
||||
representation_data.update({
|
||||
"frameStart": repre_frame_start,
|
||||
"frameEnd": (
|
||||
repre_frame_start + source_duration_handles) - 1,
|
||||
"fps": instance.data["fps"]
|
||||
})
|
||||
|
||||
                instance.data["representations"].append(representation_data)
            instance.data["representations"].append(representation_data)

                # add review family if found in tags
                if "review" in repre_tags:
                    instance.data["families"].append("review")
            # add review family if found in tags
            if "review" in repre_tags:
                instance.data["families"].append("review")

                self.log.info("Added representation: {}".format(
                    representation_data))
            self.log.info("Added representation: {}".format(
                representation_data))

        self.log.debug("All representations: {}".format(
            pformat(instance.data["representations"])))
            if export_type == "Sequence Publish":
                # at the end remove the duplicated clip
                flame.delete(exporting_clip)

    def _get_retimed_attributes(self, instance):
        handle_start = instance.data["handleStart"]
        handle_end = instance.data["handleEnd"]

        # get basic variables
        otio_clip = instance.data["otioClip"]

        # get available range trimmed with processed retimes
        retimed_attributes = get_media_range_with_retimes(
            otio_clip, handle_start, handle_end)
        self.log.debug(
            ">> retimed_attributes: {}".format(retimed_attributes))

        r_media_in = int(retimed_attributes["mediaIn"])
        r_media_out = int(retimed_attributes["mediaOut"])
        version_data = retimed_attributes.get("versionData")

        return {
            "version_data": version_data,
            "handle_start": int(retimed_attributes["handleStart"]),
            "handle_end": int(retimed_attributes["handleEnd"]),
            "source_duration": (
                (r_media_out - r_media_in) + 1
            ),
            "speed": float(retimed_attributes["speed"])
        }

    def _should_skip(self, preset_config, clip_path, unique_name):
        # get activating attributes
        activated_preset = preset_config["active"]
        filter_path_regex = preset_config.get("filter_path_regex")

        self.log.info(
            "Preset `{}` is active `{}` with filter `{}`".format(
                unique_name, activated_preset, filter_path_regex
            )
        )

        # skip if preset is not activated
        if not activated_preset:
            return True

        # exclude by regex filter if any
        if (
            filter_path_regex
            and not re.search(filter_path_regex, clip_path)
        ):
            return True

    def _unfolds_nested_folders(self, stage_dir, files_list, ext):
        """Unfolds nested folders

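# Illustrative sketch: _should_skip gates each preset on its active flag plus
# an optional path regex. Preset dicts and the path are invented.
import re


def should_skip(preset, clip_path):
    if not preset.get("active", True):
        return True
    pattern = preset.get("filter_path_regex")
    # skip when a filter is set and the clip path does not match it
    return bool(pattern) and not re.search(pattern, clip_path)


presets = {
    "thumbnail": {"active": True, "filter_path_regex": ".*"},
    "plates_only": {"active": True, "filter_path_regex": r"/plates/"},
}
for name, preset in presets.items():
    print(name, should_skip(preset, "/mnt/projects/sh010/comp/render.exr"))
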
@ -322,18 +470,77 @@ class ExtractSubsetResources(openpype.api.Extractor):

        return new_stage_dir, new_files_list

    def hide_other_tracks(self, sequence_clip, track_name):
    def hide_others(self, sequence_clip, segment_name, track_name):
        """Helper method used only if sequence clip is used

        Args:
            sequence_clip (flame.Clip): sequence clip
            segment_name (str): segment name
            track_name (str): track name
        """
        # create otio tracks and clips
        for ver in sequence_clip.versions:
            for track in ver.tracks:
                if len(track.segments) == 0 and track.hidden:
                if len(track.segments) == 0 and track.hidden.get_value():
                    continue

                # hide tracks which are not parent track
                if track.name.get_value() != track_name:
                    track.hidden = True
                    continue

                # hide all other segments
                for segment in track.segments:
                    if segment.name.get_value() != segment_name:
                        segment.hidden = True

    def import_clip(self, path):
        """
        Import clip from path
        """
        dir_path = os.path.dirname(path)
        media_info = MediaInfoFile(path, logger=self.log)
        file_pattern = media_info.file_pattern
        self.log.debug("__ file_pattern: {}".format(file_pattern))

        # rejoin the pattern to dir path
        new_path = os.path.join(dir_path, file_pattern)

        clips = flame.import_clips(new_path)
        self.log.info("Clips [{}] imported from `{}`".format(clips, path))

        if not clips:
            self.log.warning("Path `{}` does not contain any clips".format(path))
            return None
        elif len(clips) > 1:
            self.log.warning(
                "Path `{}` contains more than one clip".format(path)
            )
        return clips[0]

    def staging_dir(self, instance):
        """Provide a temporary directory in which to store extracted files

        Upon calling this method the staging directory is stored inside
        the instance.data['stagingDir']
        """
        staging_dir = instance.data.get('stagingDir', None)
        openpype_temp_dir = os.getenv("OPENPYPE_TEMP_DIR")

        if not staging_dir:
            if openpype_temp_dir and os.path.exists(openpype_temp_dir):
                staging_dir = os.path.normpath(
                    tempfile.mkdtemp(
                        prefix="pyblish_tmp_",
                        dir=openpype_temp_dir
                    )
                )
            else:
                staging_dir = os.path.normpath(
                    tempfile.mkdtemp(prefix="pyblish_tmp_")
                )
            instance.data['stagingDir'] = staging_dir

        instance.context.data["cleanupFullPaths"].append(staging_dir)

        return staging_dir

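# Illustrative sketch: the staging_dir override prefers OPENPYPE_TEMP_DIR and
# falls back to the system temp location; the same env-driven fallback in
# isolation.
import os
import tempfile


def make_staging_dir(prefix="pyblish_tmp_"):
    preferred = os.getenv("OPENPYPE_TEMP_DIR")
    if preferred and os.path.exists(preferred):
        # keep staging data on the preferred (e.g. farm-visible) volume
        return os.path.normpath(
            tempfile.mkdtemp(prefix=prefix, dir=preferred))
    return os.path.normpath(tempfile.mkdtemp(prefix=prefix))


print(make_staging_dir())
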
336
openpype/hosts/flame/plugins/publish/integrate_batch_group.py
Normal file

@ -0,0 +1,336 @@
import os
import copy
from collections import OrderedDict
from pprint import pformat
import pyblish
import openpype.hosts.flame.api as opfapi
import openpype.pipeline as op_pipeline
from openpype.pipeline.workfile import get_workdir


class IntegrateBatchGroup(pyblish.api.InstancePlugin):
    """Integrate published shot to batch group"""

    order = pyblish.api.IntegratorOrder + 0.45
    label = "Integrate Batch Groups"
    hosts = ["flame"]
    families = ["clip"]

    # settings
    default_loader = "LoadClip"

    def process(self, instance):
        add_tasks = instance.data["flameAddTasks"]

        # iterate all tasks from settings
        for task_data in add_tasks:
            # exclude batch group
            if not task_data["create_batch_group"]:
                continue

            # create or get already created batch group
            bgroup = self._get_batch_group(instance, task_data)

            # add batch group content
            all_batch_nodes = self._add_nodes_to_batch_with_links(
                instance, task_data, bgroup)

            for name, node in all_batch_nodes.items():
                self.log.debug("name: {}, dir: {}".format(
                    name, dir(node)
                ))
                self.log.debug("__ node.attributes: {}".format(
                    node.attributes
                ))

            # load plate to batch group
            self.log.info("Loading subset `{}` into batch `{}`".format(
                instance.data["subset"], bgroup.name.get_value()
            ))
            self._load_clip_to_context(instance, bgroup)

    def _add_nodes_to_batch_with_links(self, instance, task_data, batch_group):
        # get write file node properties > OrderedDict because order does matter
        write_pref_data = self._get_write_prefs(instance, task_data)

        batch_nodes = [
            {
                "type": "comp",
                "properties": {},
                "id": "comp_node01"
            },
            {
                "type": "Write File",
                "properties": write_pref_data,
                "id": "write_file_node01"
            }
        ]
        batch_links = [
            {
                "from_node": {
                    "id": "comp_node01",
                    "connector": "Result"
                },
                "to_node": {
                    "id": "write_file_node01",
                    "connector": "Front"
                }
            }
        ]

        # add nodes into batch group
        return opfapi.create_batch_group_conent(
            batch_nodes, batch_links, batch_group)

    def _load_clip_to_context(self, instance, bgroup):
        # get all loaders for host
        loaders_by_name = {
            loader.__name__: loader
            for loader in op_pipeline.discover_loader_plugins()
        }

        # get all published representations
        published_representations = instance.data["published_representations"]
        repres_db_id_by_name = {
            repre_info["representation"]["name"]: repre_id
            for repre_id, repre_info in published_representations.items()
        }

        # get all loadable representations
        repres_by_name = {
            repre["name"]: repre for repre in instance.data["representations"]
        }

        # get repre_id for the loadable representations
        loader_name_by_repre_id = {
            repres_db_id_by_name[repr_name]: {
                "loader": repr_data["batch_group_loader_name"],
                # add repre data for exception logging
                "_repre_data": repr_data
            }
            for repr_name, repr_data in repres_by_name.items()
            if repr_data.get("load_to_batch_group")
        }

        self.log.debug("__ loader_name_by_repre_id: {}".format(pformat(
            loader_name_by_repre_id)))

        # get representation context from the repre_id
        repre_contexts = op_pipeline.load.get_repres_contexts(
            loader_name_by_repre_id.keys())

        self.log.debug("__ repre_contexts: {}".format(pformat(
            repre_contexts)))

        # loop all returned repres from repre_context dict
        for repre_id, repre_context in repre_contexts.items():
            self.log.debug("__ repre_id: {}".format(repre_id))
            # get loader name by representation id
            loader_name = (
                loader_name_by_repre_id[repre_id]["loader"]
                # if nothing was added to settings fallback to default
                or self.default_loader
            )

            # get loader plugin
            loader_plugin = loaders_by_name.get(loader_name)
            if loader_plugin:
                # load to flame by representation context
                try:
                    op_pipeline.load.load_with_repre_context(
                        loader_plugin, repre_context, **{
                            "data": {
                                "workdir": self.task_workdir,
                                "batch": bgroup
                            }
                        })
                except op_pipeline.load.IncompatibleLoaderError as msg:
                    self.log.error(
                        "Check allowed representations for Loader `{}` "
                        "in settings > error: {}".format(
                            loader_plugin.__name__, msg))
                    self.log.error(
                        "Representation context >>{}<< is not compatible "
                        "with loader `{}`".format(
                            pformat(repre_context), loader_plugin.__name__
                        )
                    )
            else:
                self.log.warning(
                    "Something went wrong and there is no Loader found for "
                    "following data: {}".format(
                        pformat(loader_name_by_repre_id))
                )

def _get_batch_group(self, instance, task_data):
|
||||
frame_start = instance.data["frameStart"]
|
||||
frame_end = instance.data["frameEnd"]
|
||||
handle_start = instance.data["handleStart"]
|
||||
handle_end = instance.data["handleEnd"]
|
||||
frame_duration = (frame_end - frame_start) + 1
|
||||
asset_name = instance.data["asset"]
|
||||
|
||||
task_name = task_data["name"]
|
||||
batchgroup_name = "{}_{}".format(asset_name, task_name)
|
||||
|
||||
batch_data = {
|
||||
"shematic_reels": [
|
||||
"OP_LoadedReel"
|
||||
],
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end
|
||||
}
|
||||
self.log.debug(
|
||||
"__ batch_data: {}".format(pformat(batch_data)))
|
||||
|
||||
# check if the batch group already exists
|
||||
bgroup = opfapi.get_batch_group_from_desktop(batchgroup_name)
|
||||
|
||||
if not bgroup:
|
||||
self.log.info(
|
||||
"Creating new batch group: {}".format(batchgroup_name))
|
||||
# create batch with utils
|
||||
bgroup = opfapi.create_batch_group(
|
||||
batchgroup_name,
|
||||
frame_start,
|
||||
frame_duration,
|
||||
**batch_data
|
||||
)
|
||||
|
||||
else:
|
||||
self.log.info(
|
||||
"Updating batch group: {}".format(batchgroup_name))
|
||||
# update already created batch group
|
||||
bgroup = opfapi.create_batch_group(
|
||||
batchgroup_name,
|
||||
frame_start,
|
||||
frame_duration,
|
||||
update_batch_group=bgroup,
|
||||
**batch_data
|
||||
)
|
||||
|
||||
return bgroup
|
||||
|
||||
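One detail worth calling out: `frame_duration` is an inclusive frame count, so both the first and the last frame are counted. A quick check with hypothetical values:

# Hypothetical shot range; the +1 makes the count inclusive of both ends.
frame_start, frame_end = 1001, 1100
frame_duration = (frame_end - frame_start) + 1
assert frame_duration == 100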
    def _get_anamoty_data_with_current_task(self, instance, task_data):
        anatomy_data = copy.deepcopy(instance.data["anatomyData"])
        task_name = task_data["name"]
        task_type = task_data["type"]
        anatomy_obj = instance.context.data["anatomy"]

        # update task data in anatomy data
        project_task_types = anatomy_obj["tasks"]
        task_code = project_task_types.get(task_type, {}).get("short_name")
        anatomy_data.update({
            "task": {
                "name": task_name,
                "type": task_type,
                "short": task_code
            }
        })
        return anatomy_data
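To make the update above concrete, here is a minimal sketch of the anatomy data shape before and after the task swap (all values hypothetical):

import copy

# Hypothetical anatomy data as it might arrive on the instance.
src_anatomy_data = {
    "project": {"name": "demo", "code": "dm"},
    "asset": "sh010",
    "task": {"name": "edit", "type": "Edit", "short": "edt"},
}
# Hypothetical task data and project task-type mapping.
task_data = {"name": "compositing", "type": "Compositing"}
project_task_types = {"Compositing": {"short_name": "comp"}}

anatomy_data = copy.deepcopy(src_anatomy_data)
anatomy_data["task"] = {
    "name": task_data["name"],
    "type": task_data["type"],
    # unknown task types resolve to None, same as the .get() chain above
    "short": project_task_types.get(task_data["type"], {}).get("short_name"),
}
print(anatomy_data["task"])
# {'name': 'compositing', 'type': 'Compositing', 'short': 'comp'}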
    def _get_write_prefs(self, instance, task_data):
        # update task in anatomy data
        anatomy_data = self._get_anamoty_data_with_current_task(
            instance, task_data)

        self.task_workdir = self._get_shot_task_dir_path(
            instance, task_data)
        self.log.debug("__ task_workdir: {}".format(
            self.task_workdir))

        # TODO: this might be done with template in settings
        render_dir_path = os.path.join(
            self.task_workdir, "render", "flame")

        if not os.path.exists(render_dir_path):
            os.makedirs(render_dir_path, mode=0o777)

        # TODO: add most of these to `imageio/flame/batch/write_node`
        name = "{project[code]}_{asset}_{task[name]}".format(
            **anatomy_data
        )

        # The path attribute where the rendered clip is exported,
        # e.g. /path/to/file.[0001-0010].exr
        media_path = render_dir_path
        # name of file represented by tokens
        media_path_pattern = (
            "<name>_v<iteration###>/<name>_v<iteration###>.<frame><ext>")
        # The Create Open Clip attribute of the Write File node.
        # Determines if an Open Clip is created by the Write File node.
        create_clip = True
        # The Include Setup attribute of the Write File node.
        # Determines if a Batch Setup file is created by the Write File node.
        include_setup = True
        # The path attribute where the Open Clip file is exported by
        # the Write File node.
        create_clip_path = "<name>"
        # The path attribute where the Batch setup file
        # is exported by the Write File node.
        include_setup_path = "./<name>_v<iteration###>"
        # The file type for the files written by the Write File node.
        # Setting this attribute also overwrites format_extension,
        # bit_depth and compress_mode to match the defaults for
        # this file type.
        file_type = "OpenEXR"
        # The file extension for the files written by the Write File node.
        # This attribute resets to match file_type whenever file_type
        # is set. If you require a specific extension, you must
        # set format_extension after setting file_type.
        format_extension = "exr"
        # The bit depth for the files written by the Write File node.
        # This attribute resets to match file_type whenever file_type is set.
        bit_depth = "16"
        # The compression attribute for the files exported by the Write
        # File node. Only relevant when file_type is 'OpenEXR', 'Sgi' or 'Tiff'.
        compress = True
        # The compression format attribute for the specific file types
        # exported by the Write File node. You must set compress_mode
        # after setting file_type.
        compress_mode = "DWAB"
        # The frame index mode attribute of the Write File node.
        # Value range: `Use Timecode` or `Use Start Frame`
        frame_index_mode = "Use Start Frame"
        frame_padding = 6
        # The versioning mode of the Open Clip exported by the Write File node.
        # Only available if create_clip = True.
        version_mode = "Follow Iteration"
        version_name = "v<version>"
        version_padding = 3

        # need to make sure the order of keys is correct
        return OrderedDict((
            ("name", name),
            ("media_path", media_path),
            ("media_path_pattern", media_path_pattern),
            ("create_clip", create_clip),
            ("include_setup", include_setup),
            ("create_clip_path", create_clip_path),
            ("include_setup_path", include_setup_path),
            ("file_type", file_type),
            ("format_extension", format_extension),
            ("bit_depth", bit_depth),
            ("compress", compress),
            ("compress_mode", compress_mode),
            ("frame_index_mode", frame_index_mode),
            ("frame_padding", frame_padding),
            ("version_mode", version_mode),
            ("version_name", version_name),
            ("version_padding", version_padding)
        ))
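The `media_path_pattern` above is built from Flame Write File tokens. Assuming `<name>` resolves to the composed name, `<iteration###>` pads to three digits, `<frame>` uses the six-digit `frame_padding`, and `<ext>` expands to `.exr` (all of which are assumptions for illustration), a resolved output path would look like this:

# Hypothetical token expansion of
# "<name>_v<iteration###>/<name>_v<iteration###>.<frame><ext>"
name = "dm_sh010_compositing"
iteration = 3
frame = 1001
resolved = "{n}_v{i:03d}/{n}_v{i:03d}.{f:06d}.exr".format(
    n=name, i=iteration, f=frame)
print(resolved)
# dm_sh010_compositing_v003/dm_sh010_compositing_v003.001001.exr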
    def _get_shot_task_dir_path(self, instance, task_data):
        project_doc = instance.data["projectEntity"]
        asset_entity = instance.data["assetEntity"]
        anatomy = instance.context.data["anatomy"]
        project_settings = instance.context.data["project_settings"]

        return get_workdir(
            project_doc,
            asset_entity,
            task_data["name"],
            "flame",
            anatomy,
            project_settings=project_settings
        )

@@ -1,24 +0,0 @@
-import pyblish
-
-
-@pyblish.api.log
-class ValidateSourceClip(pyblish.api.InstancePlugin):
-    """Validate that the instance does not have an empty `flameSourceClip`"""
-
-    order = pyblish.api.ValidatorOrder
-    label = "Validate Source Clip"
-    hosts = ["flame"]
-    families = ["clip"]
-
-    def process(self, instance):
-        flame_source_clip = instance.data["flameSourceClip"]
-
-        self.log.debug("_ flame_source_clip: {}".format(flame_source_clip))
-
-        if flame_source_clip is None:
-            raise AttributeError((
-                "Timeline segment `{}` has no "
-                "related clip in reels. Please make sure "
-                "you push the `Save Sources` button in the Conform tab").format(
-                instance.data["asset"]
-            ))

@@ -3,18 +3,19 @@ import sys
 from Qt import QtWidgets
 from pprint import pformat
 import atexit
 import openpype
-import avalon

 import openpype.hosts.flame.api as opfapi
+from openpype.pipeline import (
+    install_host,
+    registered_host,
+)


 def openpype_install():
     """Registering OpenPype in context
     """
     openpype.install()
-    avalon.api.install(opfapi)
-    print("Avalon registered hosts: {}".format(
-        avalon.api.registered_host()))
+    install_host(opfapi)
+    print("Registered host: {}".format(registered_host()))


 # Exception handler

@@ -3,11 +3,20 @@ import sys
 import re
 import contextlib

-from bson.objectid import ObjectId
 from Qt import QtGui

-from avalon import io
-from openpype.pipeline import switch_container
+from openpype.client import (
+    get_asset_by_name,
+    get_subset_by_name,
+    get_last_version_by_subset_id,
+    get_representation_by_id,
+    get_representation_by_name,
+    get_representation_parents,
+)
+from openpype.pipeline import (
+    switch_container,
+    legacy_io,
+)
 from .pipeline import get_current_comp, comp_lock_and_undo_chunk

 self = sys.modules[__name__]

@@ -91,11 +100,16 @@ def switch_item(container,
         raise ValueError("Must have at least one change provided to switch.")

     # Collect any of current asset, subset and representation if not provided
     # so we can use the original name from those.
+    project_name = legacy_io.active_project()
     if any(not x for x in [asset_name, subset_name, representation_name]):
-        _id = ObjectId(container["representation"])
-        representation = io.find_one({"type": "representation", "_id": _id})
-        version, subset, asset, project = io.parenthood(representation)
+        repre_id = container["representation"]
+        representation = get_representation_by_id(project_name, repre_id)
+        repre_parent_docs = get_representation_parents(representation)
+        if repre_parent_docs:
+            version, subset, asset, _ = repre_parent_docs
+        else:
+            version = subset = asset = None

     if asset_name is None:
         asset_name = asset["name"]

@@ -107,39 +121,26 @@ def switch_item(container,
         representation_name = representation["name"]

     # Find the new one
-    asset = io.find_one({
-        "name": asset_name,
-        "type": "asset"
-    })
+    asset = get_asset_by_name(project_name, asset_name, fields=["_id"])
     assert asset, ("Could not find asset in the database with the name "
                    "'%s'" % asset_name)

-    subset = io.find_one({
-        "name": subset_name,
-        "type": "subset",
-        "parent": asset["_id"]
-    })
+    subset = get_subset_by_name(
+        project_name, subset_name, asset["_id"], fields=["_id"]
+    )
     assert subset, ("Could not find subset in the database with the name "
                     "'%s'" % subset_name)

-    version = io.find_one(
-        {
-            "type": "version",
-            "parent": subset["_id"]
-        },
-        sort=[('name', -1)]
+    version = get_last_version_by_subset_id(
+        project_name, subset["_id"], fields=["_id"]
     )

     assert version, "Could not find a version for {}.{}".format(
         asset_name, subset_name
     )

-    representation = io.find_one({
-        "name": representation_name,
-        "type": "representation",
-        "parent": version["_id"]}
+    representation = get_representation_by_name(
+        project_name, representation_name, version["_id"]
     )

     assert representation, ("Could not find representation in the database "
                             "with the name '%s'" % representation_name)
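The `switch_item` hunks above all follow the recurring migration pattern in this merge: raw `io.find_one` Mongo queries are replaced with the typed helpers from `openpype.client`, which take the project name explicitly and accept a `fields` projection. A minimal sketch of the pattern in isolation (the asset name is hypothetical):

from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io

# Before: asset = io.find_one({"type": "asset", "name": "sh010"})
project_name = legacy_io.active_project()
# fields=["_id"] narrows the query to just the id, as in the hunk above
asset = get_asset_by_name(project_name, "sh010", fields=["_id"])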

@@ -8,7 +8,7 @@ import contextlib

 import pyblish.api

-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,

@@ -20,7 +20,7 @@ from openpype.pipeline import (
 )
 import openpype.hosts.fusion

-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)

 HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__))
 PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")

@@ -45,7 +45,8 @@ def install():
     This is where you install menus and register families, data
     and loaders into fusion.

-    It is called automatically when installing via `api.install(avalon.fusion)`
+    It is called automatically when installing via
+    `openpype.pipeline.install_host(openpype.hosts.fusion.api)`

     See the Maya equivalent for inspiration on how to implement this.

@@ -6,7 +6,7 @@ from openpype.pipeline import load


 class FusionSetFrameRangeLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range excluding pre- and post-handles"""

     families = ["animation",
                 "camera",

@@ -40,7 +40,7 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin):


 class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range including pre- and post-handles"""

     families = ["animation",
                 "camera",

@@ -1,10 +1,10 @@
 import os
 import contextlib

-from avalon import io
-
+from openpype.client import get_version_by_id
 from openpype.pipeline import (
     load,
+    legacy_io,
     get_representation_path,
 )
 from openpype.hosts.fusion.api import (

@@ -124,7 +124,7 @@ def loader_shift(loader, frame, relative=True):
 class FusionLoadSequence(load.LoaderPlugin):
     """Load image sequence into Fusion"""

-    families = ["imagesequence", "review", "render"]
+    families = ["imagesequence", "review", "render", "plate"]
     representations = ["*"]

     label = "Load sequence"
@@ -212,8 +212,8 @@ class FusionLoadSequence(load.LoaderPlugin):
         path = self._get_first_image(root)

         # Get start frame from version data
-        version = io.find_one({"type": "version",
-                               "_id": representation["parent"]})
+        project_name = legacy_io.active_project()
+        version = get_version_by_id(project_name, representation["parent"])
         start = version["data"].get("frameStart")
         if start is None:
             self.log.warning("Missing start frame for updated version"

@@ -4,10 +4,10 @@ import getpass

 import requests

-from avalon import api
-
 import pyblish.api

+from openpype.pipeline import legacy_io
+

 class FusionSubmitDeadline(pyblish.api.InstancePlugin):
     """Submit current Comp to Deadline

@@ -133,7 +133,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
             "FUSION9_MasterPrefs"
         ]
         environment = dict({key: os.environ[key] for key in keys
-                            if key in os.environ}, **api.Session)
+                            if key in os.environ}, **legacy_io.Session)

         payload["JobInfo"].update({
             "EnvironmentKeyValue%d" % index: "{key}={value}".format(

@@ -146,7 +146,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
         self.log.info(json.dumps(payload, indent=4, sort_keys=True))

         # E.g. http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(DEADLINE_REST_URL)
+        url = "{}/api/jobs".format(deadline_url)
         response = requests.post(url, json=payload)
         if not response.ok:
             raise Exception(response.text)
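In the Deadline submission hunks, the pipeline session now comes from `legacy_io.Session` instead of `api.Session`; the plugin merges a whitelist of process environment variables with that session into the job environment. A standalone sketch of the merge (the session content is hypothetical):

import os

keys = ["FUSION9_MasterPrefs"]  # subset of the whitelist from the hunk above
session = {"AVALON_PROJECT": "demo"}  # hypothetical legacy_io.Session payload

# merge whitelisted env vars with the session, as the plugin does
environment = dict(
    {key: os.environ[key] for key in keys if key in os.environ},
    **session
)
print(environment)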