diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 6ed6ae428c..96e768e420 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -6,6 +6,8 @@ labels: bug assignees: '' --- +**Running version** +[ex. 3.14.1-nightly.2] **Describe the bug** A clear and concise description of what the bug is. diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index bf39f8f956..078f6c85bb 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -37,27 +37,27 @@ jobs: echo ::set-output name=next_tag::$RESULT - - name: "✏️ Generate full changelog" - if: steps.version_type.outputs.type != 'skip' - id: generate-full-changelog - uses: heinrichreimer/github-changelog-generator-action@v2.2 - with: - token: ${{ secrets.ADMIN_TOKEN }} - addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' - issues: false - issuesWoLabels: false - sinceTag: "3.0.0" - maxIssues: 100 - pullRequests: true - prWoLabels: false - author: false - unreleased: true - compareLink: true - stripGeneratorNotice: true - verbose: true - unreleasedLabel: ${{ steps.version.outputs.next_tag }} - excludeTagsRegex: "CI/.+" - releaseBranch: "main" + # - name: "✏️ Generate full changelog" + # if: steps.version_type.outputs.type != 'skip' + # id: generate-full-changelog + # uses: heinrichreimer/github-changelog-generator-action@v2.3 + # with: + # token: ${{ secrets.ADMIN_TOKEN }} + # addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' + # issues: false + # issuesWoLabels: false + # sinceTag: "3.12.0" + # maxIssues: 100 + # pullRequests: true + # prWoLabels: false + # author: false + # unreleased: true + # compareLink: true + # stripGeneratorNotice: true + # verbose: true + # unreleasedLabel: ${{ steps.version.outputs.next_tag }} + # excludeTagsRegex: "CI/.+" + # releaseBranch: "main" - name: "🖨️ Print changelog to console" if: steps.version_type.outputs.type != 'skip' @@ -85,11 +85,11 @@ jobs: tags: true unprotect_reviews: true - - name: 🔨 Merge main back to develop + - name: 🔨 Merge main back to develop uses: everlytic/branch-merge@1.1.0 if: steps.version_type.outputs.type != 'skip' with: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' target_branch: 'develop' - commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' \ No newline at end of file + commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' diff --git a/.github/workflows/release.yml 
b/.github/workflows/release.yml index 85864b4442..754f3d32d6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,7 +2,7 @@ name: Stable Release on: release: - types: + types: - prereleased jobs: @@ -13,7 +13,7 @@ jobs: steps: - name: 🚛 Checkout Code uses: actions/checkout@v2 - with: + with: fetch-depth: 0 - name: Set up Python @@ -33,27 +33,27 @@ jobs: echo ::set-output name=last_release::$LASTRELEASE echo ::set-output name=release_tag::$RESULT - - name: "✏️ Generate full changelog" - if: steps.version.outputs.release_tag != 'skip' - id: generate-full-changelog - uses: heinrichreimer/github-changelog-generator-action@v2.2 - with: - token: ${{ secrets.ADMIN_TOKEN }} - addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' - issues: false - issuesWoLabels: false - sinceTag: "3.0.0" - maxIssues: 100 - pullRequests: true - prWoLabels: false - author: false - unreleased: true - compareLink: true - stripGeneratorNotice: true - verbose: true - futureRelease: ${{ steps.version.outputs.release_tag }} - excludeTagsRegex: "CI/.+" - releaseBranch: "main" + # - name: "✏️ Generate full changelog" + # if: steps.version.outputs.release_tag != 'skip' + # id: generate-full-changelog + # uses: heinrichreimer/github-changelog-generator-action@v2.3 + # with: + # token: ${{ secrets.ADMIN_TOKEN }} + # addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' + # issues: false + # issuesWoLabels: false + # sinceTag: "3.12.0" + # maxIssues: 100 + # pullRequests: true + # prWoLabels: false + # author: false + # unreleased: true + # compareLink: true + # stripGeneratorNotice: true + # verbose: true + # futureRelease: ${{ steps.version.outputs.release_tag }} + # excludeTagsRegex: "CI/.+" + # releaseBranch: "main" - name: 💾 Commit and Tag id: git_commit @@ -73,8 +73,8 @@ jobs: token: ${{ secrets.ADMIN_TOKEN }} branch: main tags: true - unprotect_reviews: true - + unprotect_reviews: true + - name: "✏️ Generate last changelog" if: steps.version.outputs.release_tag != 'skip' id: generate-last-changelog @@ -114,11 +114,11 @@ jobs: with: tag: "${{ steps.version.outputs.current_version }}" - - name: 🔁 Merge main back to develop + - name: 🔁 Merge main back to develop if: steps.version.outputs.release_tag != 'skip' uses: everlytic/branch-merge@1.1.0 with: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' target_branch: 'develop' - commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}' \ No newline at end of file + commit_message_template: '[Automated] 
Merged release {source_ref} into {target_branch}' diff --git a/.gitignore b/.gitignore index ea5b20eb69..18e7cd7bf2 100644 --- a/.gitignore +++ b/.gitignore @@ -107,3 +107,8 @@ website/.docusaurus mypy.ini tools/run_eventserver.* + +# Developer tools +tools/dev_* + +.github_changelog_generator diff --git a/CHANGELOG.md b/CHANGELOG.md index 65a3cb27e6..c4f1dcf314 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,24 +1,259 @@ # Changelog -## [3.14.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.4](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...HEAD) + +**🆕 New features** + +- Webpublisher: use max next published version number for all items in batch [\#3961](https://github.com/pypeclub/OpenPype/pull/3961) +- General: Control Thumbnail integration via explicit configuration profiles [\#3951](https://github.com/pypeclub/OpenPype/pull/3951) **🚀 Enhancements** -- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Publisher: Multiselection in card view [\#3993](https://github.com/pypeclub/OpenPype/pull/3993) +- TrayPublisher: Original Basename cause crash too early [\#3990](https://github.com/pypeclub/OpenPype/pull/3990) +- Tray Publisher: add `originalBasename` data to simple creators [\#3988](https://github.com/pypeclub/OpenPype/pull/3988) +- General: Custom paths to ffmpeg and OpenImageIO tools [\#3982](https://github.com/pypeclub/OpenPype/pull/3982) +- Integrate: Preserve existing subset group if instance does not set it for new version [\#3976](https://github.com/pypeclub/OpenPype/pull/3976) +- Publisher: Prepare publisher controller for remote publishing [\#3972](https://github.com/pypeclub/OpenPype/pull/3972) +- Maya: new style dataclasses in maya deadline submitter plugin [\#3968](https://github.com/pypeclub/OpenPype/pull/3968) +- Maya: Define preffered Qt bindings for Qt.py and qtpy [\#3963](https://github.com/pypeclub/OpenPype/pull/3963) +- Settings: Move imageio from project anatomy to project settings \[pypeclub\] [\#3959](https://github.com/pypeclub/OpenPype/pull/3959) +- TrayPublisher: Extract thumbnail for other families [\#3952](https://github.com/pypeclub/OpenPype/pull/3952) +- Publisher: Pass instance to subset name method on update [\#3949](https://github.com/pypeclub/OpenPype/pull/3949) +- General: Set root environments before DCC launch [\#3947](https://github.com/pypeclub/OpenPype/pull/3947) +- Refactor: changed legacy way to update database for Hero version integrate [\#3941](https://github.com/pypeclub/OpenPype/pull/3941) +- Maya: Moved plugin from global to maya [\#3939](https://github.com/pypeclub/OpenPype/pull/3939) +- Publisher: Create dialog is part of main window [\#3936](https://github.com/pypeclub/OpenPype/pull/3936) +- Fusion: Implement Alembic and FBX mesh loader [\#3927](https://github.com/pypeclub/OpenPype/pull/3927) +- Maya: Remove hardcoded requirement for maya/ start for image file prefix [\#3873](https://github.com/pypeclub/OpenPype/pull/3873) **🐛 Bug fixes** +- TrayPublisher: Disable sequences in batch mov creator [\#3996](https://github.com/pypeclub/OpenPype/pull/3996) +- Fix - tags might be missing on representation [\#3985](https://github.com/pypeclub/OpenPype/pull/3985) +- Resolve: Fix usage of functions from lib [\#3983](https://github.com/pypeclub/OpenPype/pull/3983) +- Maya: remove invalid prefix token for 
non-multipart outputs [\#3981](https://github.com/pypeclub/OpenPype/pull/3981) +- Ftrack: Fix schema cache for Python 2 [\#3980](https://github.com/pypeclub/OpenPype/pull/3980) +- Maya: add object to attr.s declaration [\#3973](https://github.com/pypeclub/OpenPype/pull/3973) +- Maya: Deadline OutputFilePath hack regression for Renderman [\#3950](https://github.com/pypeclub/OpenPype/pull/3950) +- Houdini: Fix validate workfile paths for non-parm file references [\#3948](https://github.com/pypeclub/OpenPype/pull/3948) +- Photoshop: missed sync published version of workfile with workfile [\#3946](https://github.com/pypeclub/OpenPype/pull/3946) +- Maya: Set default value for RenderSetupIncludeLights option [\#3944](https://github.com/pypeclub/OpenPype/pull/3944) +- Maya: fix regression of Renderman Deadline hack [\#3943](https://github.com/pypeclub/OpenPype/pull/3943) +- Kitsu: 2 fixes, nb\_frames and Shot type error [\#3940](https://github.com/pypeclub/OpenPype/pull/3940) +- Tray: Change order of attribute changes [\#3938](https://github.com/pypeclub/OpenPype/pull/3938) +- AttributeDefs: Fix crashing multivalue of files widget [\#3937](https://github.com/pypeclub/OpenPype/pull/3937) +- General: Fix links query on hero version [\#3900](https://github.com/pypeclub/OpenPype/pull/3900) +- Publisher: Files Drag n Drop cleanup [\#3888](https://github.com/pypeclub/OpenPype/pull/3888) + +**🔀 Refactored code** + +- Flame: Import lib functions from lib [\#3992](https://github.com/pypeclub/OpenPype/pull/3992) +- General: Fix deprecated warning in legacy creator [\#3978](https://github.com/pypeclub/OpenPype/pull/3978) +- Blender: Remove openpype api imports [\#3977](https://github.com/pypeclub/OpenPype/pull/3977) +- General: Use direct import of resources [\#3964](https://github.com/pypeclub/OpenPype/pull/3964) +- General: Direct settings imports [\#3934](https://github.com/pypeclub/OpenPype/pull/3934) +- General: import 'Logger' from 'openpype.lib' [\#3926](https://github.com/pypeclub/OpenPype/pull/3926) +- General: Remove deprecated functions from lib [\#3907](https://github.com/pypeclub/OpenPype/pull/3907) + +**Merged pull requests:** + +- Maya + Yeti: Load Yeti Cache fix frame number recognition [\#3942](https://github.com/pypeclub/OpenPype/pull/3942) +- Fusion: Implement callbacks to Fusion's event system thread [\#3928](https://github.com/pypeclub/OpenPype/pull/3928) +- Photoshop: create single frame image in Ftrack as review [\#3908](https://github.com/pypeclub/OpenPype/pull/3908) + +## [3.14.3](https://github.com/pypeclub/OpenPype/tree/3.14.3) (2022-10-03) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...3.14.3) + +**🚀 Enhancements** + +- Publisher: Enhancement proposals [\#3897](https://github.com/pypeclub/OpenPype/pull/3897) +- Maya: better logging in Maketx [\#3886](https://github.com/pypeclub/OpenPype/pull/3886) +- Photoshop: review can be turned off [\#3885](https://github.com/pypeclub/OpenPype/pull/3885) +- TrayPublisher: added persisting of last selected project [\#3871](https://github.com/pypeclub/OpenPype/pull/3871) +- TrayPublisher: added text filter on project name to Tray Publisher [\#3867](https://github.com/pypeclub/OpenPype/pull/3867) +- Github issues adding `running version` section [\#3864](https://github.com/pypeclub/OpenPype/pull/3864) +- Publisher: Increase size of main window [\#3862](https://github.com/pypeclub/OpenPype/pull/3862) +- Flame: make migratable projects after creation [\#3860](https://github.com/pypeclub/OpenPype/pull/3860) +- Photoshop: 
synchronize image version with workfile [\#3854](https://github.com/pypeclub/OpenPype/pull/3854) + +**🐛 Bug fixes** + +- Maya: Fix Render single camera validator [\#3929](https://github.com/pypeclub/OpenPype/pull/3929) +- Flame: loading multilayer exr to batch/reel is working [\#3901](https://github.com/pypeclub/OpenPype/pull/3901) +- Hiero: Fix inventory check on launch [\#3895](https://github.com/pypeclub/OpenPype/pull/3895) +- WebPublisher: Fix import after refactor [\#3891](https://github.com/pypeclub/OpenPype/pull/3891) +- TVPaint: Fix renaming of rendered files [\#3882](https://github.com/pypeclub/OpenPype/pull/3882) +- Publisher: Nice checkbox visible in Python 2 [\#3877](https://github.com/pypeclub/OpenPype/pull/3877) +- Settings: Add missing default settings [\#3870](https://github.com/pypeclub/OpenPype/pull/3870) +- General: Copy of workfile does not use 'copy' function but 'copyfile' [\#3869](https://github.com/pypeclub/OpenPype/pull/3869) +- Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) +- Flame: retimed attributes are integrated with settings [\#3855](https://github.com/pypeclub/OpenPype/pull/3855) + +**🔀 Refactored code** + +- Maya: Remove unused 'openpype.api' imports in plugins [\#3925](https://github.com/pypeclub/OpenPype/pull/3925) +- Resolve: Use new Extractor location [\#3918](https://github.com/pypeclub/OpenPype/pull/3918) +- Unreal: Use new Extractor location [\#3917](https://github.com/pypeclub/OpenPype/pull/3917) +- Flame: Use new Extractor location [\#3916](https://github.com/pypeclub/OpenPype/pull/3916) +- Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) +- Harmony: Use new Extractor location [\#3893](https://github.com/pypeclub/OpenPype/pull/3893) + +**Merged pull requests:** + +- Maya: Fix Scene Inventory possibly starting off-screen due to maya preferences [\#3923](https://github.com/pypeclub/OpenPype/pull/3923) +- Maya: RenderSettings set default image format for V-Ray+Redshift to exr [\#3879](https://github.com/pypeclub/OpenPype/pull/3879) +- Remove lockfile during publish [\#3874](https://github.com/pypeclub/OpenPype/pull/3874) + +## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) + +### 📖 Documentation + +- Documentation: Anatomy templates [\#3618](https://github.com/pypeclub/OpenPype/pull/3618) + +**🆕 New features** + +- Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) + +**🚀 Enhancements** + +- Flame: Adding Creator's retimed shot and handles switch [\#3826](https://github.com/pypeclub/OpenPype/pull/3826) +- Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) +- General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) +- Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) +- SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) +- Kitsu: Drop 'entities root' setting. 
[\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) +- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) +- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) +- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) +- Scene Inventory: Add subsetGroup column [\#3658](https://github.com/pypeclub/OpenPype/pull/3658) + +**🐛 Bug fixes** + +- General: Fix Pattern access in client code [\#3828](https://github.com/pypeclub/OpenPype/pull/3828) +- Launcher: Skip opening last work file works for groups [\#3822](https://github.com/pypeclub/OpenPype/pull/3822) +- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) +- Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) +- Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) +- nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) +- Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) +- Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) +- Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) +- Maya: `containerise` dont skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) + +**🔀 Refactored code** + +- Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) +- Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) +- AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) +- General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) +- General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) +- General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) +- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) +- General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) +- General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) + +**Merged pull requests:** + +- Standalone Publisher: Ignore empty labels, then still use name like other asset 
models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) +- Kitsu - sync\_all\_project - add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) + +## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1) + +### 📖 Documentation + +- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) +- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) + +**🆕 New features** + +- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) +- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) + +**🚀 Enhancements** + +- General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) +- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) +- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) +- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) +- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) +- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) +- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) + +**🐛 Bug fixes** + +- Maya: Fix typo in getPanel argument `with_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) +- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) +- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) +- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) +- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) +- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) +- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) +- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) +- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) - RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) +- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) + +**🔀 Refactored code** + +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop 
as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) +- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) +- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) +- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) +- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) +- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) +- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) +- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) +- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) +- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) +- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) +- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) +- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) + +**Merged pull requests:** + +- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) +- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) +- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) +- Nuke: Validation refactory to new publisher [\#3567](https://github.com/pypeclub/OpenPype/pull/3567) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...3.14.0) **🆕 New features** - Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578) +- Maya: Implementation of JSON layout for Unreal workflow [\#3353](https://github.com/pypeclub/OpenPype/pull/3353) +- Maya: Build workfile by template [\#3315](https://github.com/pypeclub/OpenPype/pull/3315) **🚀 Enhancements** @@ -56,11 +291,12 @@ ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) **🆕 New features** - Support for mutliple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) +- Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) **🚀 Enhancements** @@ -73,6 +309,7 @@ - General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) - Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) - General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) +- Maya: Render Creator has configurable options. 
[\#3097](https://github.com/pypeclub/OpenPype/pull/3097) **🐛 Bug fixes** @@ -109,7 +346,7 @@ ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...3.12.2) ### 📖 Documentation @@ -119,6 +356,19 @@ **🚀 Enhancements** - General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) +- Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) +- General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) +- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) +- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) +- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) +- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) +- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) +- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) +- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) +- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) +- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) +- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) +- Enhance powershell build scripts [\#1827](https://github.com/pypeclub/OpenPype/pull/1827) **🐛 Bug fixes** @@ -129,10 +379,30 @@ - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) +- Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) +- General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) +- Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525) +- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) +- Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521) +- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) +- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) +- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) +- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) +- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) **🔀 Refactored code** - General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) +- General: Mongo core connection moved to client 
[\#3531](https://github.com/pypeclub/OpenPype/pull/3531) +- Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) +- General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) +- General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) +- General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) +- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) +- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) +- Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) +- Refactor Integrate Asset [\#2898](https://github.com/pypeclub/OpenPype/pull/2898) **Merged pull requests:** @@ -140,23 +410,359 @@ ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...3.12.1) + +### 📖 Documentation + +- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) + +**🆕 New features** + +- Maya: Add VDB to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) + +**🚀 Enhancements** + +- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) +- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) +- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) +- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) +- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) +- Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) +- Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) +- Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) +- Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) +- Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) +- Maya: Add additional playblast options to review Extractor. [\#3384](https://github.com/pypeclub/OpenPype/pull/3384) +- Maya: Ability to set resolution for playblasts from asset, and override through review instance. 
[\#3360](https://github.com/pypeclub/OpenPype/pull/3360) +- Maya: Redshift Volume Loader Implement update, remove, switch + fix vdb sequence support [\#3197](https://github.com/pypeclub/OpenPype/pull/3197) +- Maya: Implement `iter_visible_nodes_in_range` for extracting Alembics [\#3100](https://github.com/pypeclub/OpenPype/pull/3100) + +**🐛 Bug fixes** + +- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) +- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) +- General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) +- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) +- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) +- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) +- Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) +- Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) +- General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) +- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) +- General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) +- Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) +- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) +- Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) +- LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) +- Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) +- Maya: Camera extra data - additional fix for \#3304 [\#3386](https://github.com/pypeclub/OpenPype/pull/3386) +- Maya: Handle excluding `model` family from frame range validator. 
[\#3370](https://github.com/pypeclub/OpenPype/pull/3370) + +**🔀 Refactored code** + +- Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) +- Maya: Re-use `maintained_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) +- General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) +- Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) +- General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) +- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) +- General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) +- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) +- General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) +- General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) +- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) +- Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) +- General: Host implementation defined with class [\#3337](https://github.com/pypeclub/OpenPype/pull/3337) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.1...3.12.0) + +### 📖 Documentation + +- Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) +- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) + +**🆕 New features** + +- Shotgrid: Add production beta of shotgrid integration [\#2921](https://github.com/pypeclub/OpenPype/pull/2921) + +**🚀 Enhancements** + +- Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) +- Attribute Defs UI: Files widget show what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) +- General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) +- Hosts: More options for in-host callbacks [\#3357](https://github.com/pypeclub/OpenPype/pull/3357) +- Multiverse: expose some settings to GUI [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) +- Maya: Allow more data to be published along camera 🎥 [\#3304](https://github.com/pypeclub/OpenPype/pull/3304) +- Add root keys and project keys to create starting folder [\#2755](https://github.com/pypeclub/OpenPype/pull/2755) + +**🐛 Bug fixes** + +- NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) +- Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) +- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) +- Houdini: fix loading and updating vbd/bgeo sequences [\#3408](https://github.com/pypeclub/OpenPype/pull/3408) +- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) +- General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) +- Maya: look collector typo 
[\#3392](https://github.com/pypeclub/OpenPype/pull/3392) +- TVPaint: Make sure exit code is set to not None [\#3382](https://github.com/pypeclub/OpenPype/pull/3382) +- Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) +- Flame: bunch of publishing issues [\#3377](https://github.com/pypeclub/OpenPype/pull/3377) +- Harmony: added unc path to zifile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) +- Standalone: settings improvements [\#3355](https://github.com/pypeclub/OpenPype/pull/3355) +- Nuke: Load full model hierarchy by default [\#3328](https://github.com/pypeclub/OpenPype/pull/3328) +- Nuke: multiple baking streams with correct slate [\#3245](https://github.com/pypeclub/OpenPype/pull/3245) +- Maya: fix image prefix warning in validator [\#3128](https://github.com/pypeclub/OpenPype/pull/3128) + +**🔀 Refactored code** + +- Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) +- General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) +- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) +- Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) +- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) +- Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) +- Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) +- Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) +- Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) +- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) +- AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) +- TVPaint: Use client query functions [\#3340](https://github.com/pypeclub/OpenPype/pull/3340) +- Ftrack: Use client query functions [\#3339](https://github.com/pypeclub/OpenPype/pull/3339) +- Standalone Publisher: Use client query functions [\#3330](https://github.com/pypeclub/OpenPype/pull/3330) + +**Merged pull requests:** + +- Sync Queue: Added far future value for null values for dates [\#3371](https://github.com/pypeclub/OpenPype/pull/3371) +- Maya - added support for single frame playblast review [\#3369](https://github.com/pypeclub/OpenPype/pull/3369) +- Houdini: Implement Redshift Proxy Export [\#3196](https://github.com/pypeclub/OpenPype/pull/3196) ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.0...3.11.1) + +**🆕 New features** + +- Flame: custom export temp folder [\#3346](https://github.com/pypeclub/OpenPype/pull/3346) +- Nuke: removing third-party plugins [\#3344](https://github.com/pypeclub/OpenPype/pull/3344) + +**🚀 Enhancements** + +- Pyblish Pype: Hiding/Close issues [\#3367](https://github.com/pypeclub/OpenPype/pull/3367) +- Ftrack: Removed requirement of pypeclub role from default settings [\#3354](https://github.com/pypeclub/OpenPype/pull/3354) +- Kitsu: Prevent crash on missing frames information [\#3352](https://github.com/pypeclub/OpenPype/pull/3352) +- Ftrack: Open browser from tray [\#3320](https://github.com/pypeclub/OpenPype/pull/3320) 
+- Enhancement: More control over thumbnail processing. [\#3259](https://github.com/pypeclub/OpenPype/pull/3259) + +**🐛 Bug fixes** + +- Nuke: bake streams with slate on farm [\#3368](https://github.com/pypeclub/OpenPype/pull/3368) +- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) +- Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) +- Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) +- AE- fix validate\_scene\_settings and renderLocal [\#3358](https://github.com/pypeclub/OpenPype/pull/3358) +- deadline: fixing misidentification of revieables [\#3356](https://github.com/pypeclub/OpenPype/pull/3356) +- General: Create only one thumbnail per instance [\#3351](https://github.com/pypeclub/OpenPype/pull/3351) +- nuke: adding extract thumbnail settings 3.10 [\#3347](https://github.com/pypeclub/OpenPype/pull/3347) +- General: Fix last version function [\#3345](https://github.com/pypeclub/OpenPype/pull/3345) +- Deadline: added OPENPYPE\_MONGO to filter [\#3336](https://github.com/pypeclub/OpenPype/pull/3336) +- Nuke: fixing farm publishing if review is disabled [\#3306](https://github.com/pypeclub/OpenPype/pull/3306) +- Maya: Fix Yeti errors on Create, Publish and Load [\#3198](https://github.com/pypeclub/OpenPype/pull/3198) + +**🔀 Refactored code** + +- Webpublisher: Use client query functions [\#3333](https://github.com/pypeclub/OpenPype/pull/3333) ## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.0-nightly.4...3.11.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.10.0...3.11.0) + +### 📖 Documentation + +- Documentation: Add app key to template documentation [\#3299](https://github.com/pypeclub/OpenPype/pull/3299) +- doc: adding royal render and multiverse to the web site [\#3285](https://github.com/pypeclub/OpenPype/pull/3285) +- Module: Kitsu module [\#2650](https://github.com/pypeclub/OpenPype/pull/2650) + +**🆕 New features** + +- Multiverse: fixed composition write, full docs, cosmetics [\#3178](https://github.com/pypeclub/OpenPype/pull/3178) + +**🚀 Enhancements** + +- Settings: Settings can be extracted from UI [\#3323](https://github.com/pypeclub/OpenPype/pull/3323) +- updated poetry installation source [\#3316](https://github.com/pypeclub/OpenPype/pull/3316) +- Ftrack: Action to easily create daily review session [\#3310](https://github.com/pypeclub/OpenPype/pull/3310) +- TVPaint: Extractor use mark in/out range to render [\#3309](https://github.com/pypeclub/OpenPype/pull/3309) +- Ftrack: Delivery action can work on ReviewSessions [\#3307](https://github.com/pypeclub/OpenPype/pull/3307) +- Maya: Look assigner UI improvements [\#3298](https://github.com/pypeclub/OpenPype/pull/3298) +- Ftrack: Action to transfer values of hierarchical attributes [\#3284](https://github.com/pypeclub/OpenPype/pull/3284) +- Maya: better handling of legacy review subsets names [\#3269](https://github.com/pypeclub/OpenPype/pull/3269) +- General: Updated windows oiio tool [\#3268](https://github.com/pypeclub/OpenPype/pull/3268) +- Unreal: add support for skeletalMesh and staticMesh to loaders [\#3267](https://github.com/pypeclub/OpenPype/pull/3267) +- Maya: reference loaders could store placeholder in referenced url [\#3264](https://github.com/pypeclub/OpenPype/pull/3264) +- TVPaint: Init file for TVPaint worker also handle guideline images 
[\#3250](https://github.com/pypeclub/OpenPype/pull/3250) +- Nuke: Change default icon path in settings [\#3247](https://github.com/pypeclub/OpenPype/pull/3247) +- Maya: publishing of animation and pointcache on a farm [\#3225](https://github.com/pypeclub/OpenPype/pull/3225) +- Maya: Look assigner UI improvements [\#3208](https://github.com/pypeclub/OpenPype/pull/3208) +- Nuke: add pointcache and animation to loader [\#3186](https://github.com/pypeclub/OpenPype/pull/3186) +- Nuke: Add a gizmo menu [\#3172](https://github.com/pypeclub/OpenPype/pull/3172) +- Support for Unreal 5 [\#3122](https://github.com/pypeclub/OpenPype/pull/3122) + +**🐛 Bug fixes** + +- General: Handle empty source key on instance [\#3342](https://github.com/pypeclub/OpenPype/pull/3342) +- Houdini: Fix Houdini VDB manage update wrong file attribute name [\#3322](https://github.com/pypeclub/OpenPype/pull/3322) +- Nuke: anatomy compatibility issue hacks [\#3321](https://github.com/pypeclub/OpenPype/pull/3321) +- hiero: otio p3 compatibility issue - metadata on effect use update 3.11 [\#3314](https://github.com/pypeclub/OpenPype/pull/3314) +- General: Vendorized modules for Python 2 and update poetry lock [\#3305](https://github.com/pypeclub/OpenPype/pull/3305) +- Fix - added local targets to install host [\#3303](https://github.com/pypeclub/OpenPype/pull/3303) +- Settings: Add missing default settings for nuke gizmo [\#3301](https://github.com/pypeclub/OpenPype/pull/3301) +- Maya: Fix swaped width and height in reviews [\#3300](https://github.com/pypeclub/OpenPype/pull/3300) +- Maya: point cache publish handles Maya instances [\#3297](https://github.com/pypeclub/OpenPype/pull/3297) +- Global: extract review slate issues [\#3286](https://github.com/pypeclub/OpenPype/pull/3286) +- Webpublisher: return only active projects in ProjectsEndpoint [\#3281](https://github.com/pypeclub/OpenPype/pull/3281) +- Hiero: add support for task tags 3.10.x [\#3279](https://github.com/pypeclub/OpenPype/pull/3279) +- General: Fix Oiio tool path resolving [\#3278](https://github.com/pypeclub/OpenPype/pull/3278) +- Maya: Fix udim support for e.g. 
uppercase \ tag [\#3266](https://github.com/pypeclub/OpenPype/pull/3266) +- Nuke: bake reformat was failing on string type [\#3261](https://github.com/pypeclub/OpenPype/pull/3261) +- Maya: hotfix Pxr multitexture in looks [\#3260](https://github.com/pypeclub/OpenPype/pull/3260) +- Unreal: Fix Camera Loading if Layout is missing [\#3255](https://github.com/pypeclub/OpenPype/pull/3255) +- Unreal: Fixed Animation loading in UE5 [\#3240](https://github.com/pypeclub/OpenPype/pull/3240) +- Unreal: Fixed Render creation in UE5 [\#3239](https://github.com/pypeclub/OpenPype/pull/3239) +- Unreal: Fixed Camera loading in UE5 [\#3238](https://github.com/pypeclub/OpenPype/pull/3238) +- Flame: debugging [\#3224](https://github.com/pypeclub/OpenPype/pull/3224) +- add silent audio to slate [\#3162](https://github.com/pypeclub/OpenPype/pull/3162) +- Add timecode to slate [\#2929](https://github.com/pypeclub/OpenPype/pull/2929) + +**🔀 Refactored code** + +- Blender: Use client query functions [\#3331](https://github.com/pypeclub/OpenPype/pull/3331) +- General: Define query functions [\#3288](https://github.com/pypeclub/OpenPype/pull/3288) + +**Merged pull requests:** + +- Maya: add pointcache family to gpu cache loader [\#3318](https://github.com/pypeclub/OpenPype/pull/3318) +- Maya look: skip empty file attributes [\#3274](https://github.com/pypeclub/OpenPype/pull/3274) ## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.6...3.10.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.8...3.10.0) + +### 📖 Documentation + +- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094) +- Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052) + +**🆕 New features** + +- General: OpenPype modules publish plugins are registered in host [\#3180](https://github.com/pypeclub/OpenPype/pull/3180) +- General: Creator plugins from addons can be registered [\#3179](https://github.com/pypeclub/OpenPype/pull/3179) +- Ftrack: Single image reviewable [\#3157](https://github.com/pypeclub/OpenPype/pull/3157) +- Nuke: Expose write attributes to settings [\#3123](https://github.com/pypeclub/OpenPype/pull/3123) +- Hiero: Initial frame publish support [\#3106](https://github.com/pypeclub/OpenPype/pull/3106) +- Unreal: Render Publishing [\#2917](https://github.com/pypeclub/OpenPype/pull/2917) +- AfterEffects: Implemented New Publisher [\#2838](https://github.com/pypeclub/OpenPype/pull/2838) +- Unreal: Rendering implementation [\#2410](https://github.com/pypeclub/OpenPype/pull/2410) + +**🚀 Enhancements** + +- Maya: FBX camera export [\#3253](https://github.com/pypeclub/OpenPype/pull/3253) +- General: updating common vendor `scriptmenu` to 1.5.2 [\#3246](https://github.com/pypeclub/OpenPype/pull/3246) +- Project Manager: Allow to paste Tasks into multiple assets at the same time [\#3226](https://github.com/pypeclub/OpenPype/pull/3226) +- Project manager: Sped up project load [\#3216](https://github.com/pypeclub/OpenPype/pull/3216) +- Loader UI: Speed issues of loader with sync server [\#3199](https://github.com/pypeclub/OpenPype/pull/3199) +- Looks: add basic support for Renderman [\#3190](https://github.com/pypeclub/OpenPype/pull/3190) +- Maya: added clean\_import option to Import loader [\#3181](https://github.com/pypeclub/OpenPype/pull/3181) +- Add the scripts menu definition to nuke [\#3168](https://github.com/pypeclub/OpenPype/pull/3168) +- 
Maya: add maya 2023 to default applications [\#3167](https://github.com/pypeclub/OpenPype/pull/3167) +- Compressed bgeo publishing in SAP and Houdini loader [\#3153](https://github.com/pypeclub/OpenPype/pull/3153) +- General: Add 'dataclasses' to required python modules [\#3149](https://github.com/pypeclub/OpenPype/pull/3149) +- Hooks: Tweak logging grammar [\#3147](https://github.com/pypeclub/OpenPype/pull/3147) +- Nuke: settings for reformat node in CreateWriteRender node [\#3143](https://github.com/pypeclub/OpenPype/pull/3143) +- Houdini: Add loader for alembic through Alembic Archive node [\#3140](https://github.com/pypeclub/OpenPype/pull/3140) +- Publisher: UI Modifications and fixes [\#3139](https://github.com/pypeclub/OpenPype/pull/3139) +- General: Simplified OP modules/addons import [\#3137](https://github.com/pypeclub/OpenPype/pull/3137) +- Terminal: Tweak coloring of TrayModuleManager logging enabled states [\#3133](https://github.com/pypeclub/OpenPype/pull/3133) +- General: Cleanup some Loader docstrings [\#3131](https://github.com/pypeclub/OpenPype/pull/3131) +- Nuke: render instance with subset name filtered overrides [\#3117](https://github.com/pypeclub/OpenPype/pull/3117) +- Unreal: Layout and Camera update and remove functions reimplemented and improvements [\#3116](https://github.com/pypeclub/OpenPype/pull/3116) +- Settings: Remove environment groups from settings [\#3115](https://github.com/pypeclub/OpenPype/pull/3115) +- TVPaint: Match renderlayer key with other hosts [\#3110](https://github.com/pypeclub/OpenPype/pull/3110) +- Ftrack: AssetVersion status on publish [\#3108](https://github.com/pypeclub/OpenPype/pull/3108) +- Tray publisher: Simple families from settings [\#3105](https://github.com/pypeclub/OpenPype/pull/3105) +- Local Settings UI: Overlay messages on save and reset [\#3104](https://github.com/pypeclub/OpenPype/pull/3104) +- General: Remove repos related logic [\#3087](https://github.com/pypeclub/OpenPype/pull/3087) +- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080) +- Houdini: Fix FPS + outdated content pop-ups [\#3079](https://github.com/pypeclub/OpenPype/pull/3079) +- General: Add global log verbose arguments [\#3070](https://github.com/pypeclub/OpenPype/pull/3070) +- Flame: extract presets distribution [\#3063](https://github.com/pypeclub/OpenPype/pull/3063) +- Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055) +- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) +- Maya: Implement Hardware Renderer 2.0 support for Render Products [\#2611](https://github.com/pypeclub/OpenPype/pull/2611) + +**🐛 Bug fixes** + +- nuke: use framerange issue [\#3254](https://github.com/pypeclub/OpenPype/pull/3254) +- Ftrack: Chunk sizes for queries has minimal condition [\#3244](https://github.com/pypeclub/OpenPype/pull/3244) +- Maya: renderman displays needs to be filtered [\#3242](https://github.com/pypeclub/OpenPype/pull/3242) +- Ftrack: Validate that the user exists on ftrack [\#3237](https://github.com/pypeclub/OpenPype/pull/3237) +- Maya: Fix support for multiple resolutions [\#3236](https://github.com/pypeclub/OpenPype/pull/3236) +- TVPaint: Look for more groups than 12 [\#3228](https://github.com/pypeclub/OpenPype/pull/3228) +- Hiero: debugging frame range and other 3.10 [\#3222](https://github.com/pypeclub/OpenPype/pull/3222) +- Project Manager: Fix persistent editors on project change 
[\#3218](https://github.com/pypeclub/OpenPype/pull/3218) +- Deadline: instance data overwrite fix [\#3214](https://github.com/pypeclub/OpenPype/pull/3214) +- Ftrack: Push hierarchical attributes action works [\#3210](https://github.com/pypeclub/OpenPype/pull/3210) +- Standalone Publisher: Always create new representation for thumbnail [\#3203](https://github.com/pypeclub/OpenPype/pull/3203) +- Photoshop: skip collector when automatic testing [\#3202](https://github.com/pypeclub/OpenPype/pull/3202) +- Nuke: render/workfile version sync doesn't work on farm [\#3185](https://github.com/pypeclub/OpenPype/pull/3185) +- Ftrack: Review image only if there are no mp4 reviews [\#3183](https://github.com/pypeclub/OpenPype/pull/3183) +- Ftrack: Locations deepcopy issue [\#3177](https://github.com/pypeclub/OpenPype/pull/3177) +- General: Avoid creating multiple thumbnails [\#3176](https://github.com/pypeclub/OpenPype/pull/3176) +- General/Hiero: better clip duration calculation [\#3169](https://github.com/pypeclub/OpenPype/pull/3169) +- General: Oiio conversion for ffmpeg checks for invalid characters [\#3166](https://github.com/pypeclub/OpenPype/pull/3166) +- Fix for attaching render to subset [\#3164](https://github.com/pypeclub/OpenPype/pull/3164) +- Harmony: fixed missing task name in render instance [\#3163](https://github.com/pypeclub/OpenPype/pull/3163) +- Ftrack: Action delete old versions formatting works [\#3152](https://github.com/pypeclub/OpenPype/pull/3152) +- Deadline: fix the output directory [\#3144](https://github.com/pypeclub/OpenPype/pull/3144) +- General: New Session schema [\#3141](https://github.com/pypeclub/OpenPype/pull/3141) +- General: Missing version on headless mode crash properly [\#3136](https://github.com/pypeclub/OpenPype/pull/3136) +- TVPaint: Composite layers in reversed order [\#3135](https://github.com/pypeclub/OpenPype/pull/3135) +- Nuke: fixing default settings for workfile builder loaders [\#3120](https://github.com/pypeclub/OpenPype/pull/3120) +- Nuke: fix anatomy imageio regex default [\#3119](https://github.com/pypeclub/OpenPype/pull/3119) +- General: Python 3 compatibility in queries [\#3112](https://github.com/pypeclub/OpenPype/pull/3112) +- General: TemplateResult can be copied [\#3099](https://github.com/pypeclub/OpenPype/pull/3099) +- General: Collect loaded versions skips not existing representations [\#3095](https://github.com/pypeclub/OpenPype/pull/3095) +- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091) +- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089) +- Maya: Collect Render fix any render cameras check [\#3088](https://github.com/pypeclub/OpenPype/pull/3088) +- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083) +- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077) +- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076) +- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068) +- Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060) +- Fix support for Renderman in Maya [\#3006](https://github.com/pypeclub/OpenPype/pull/3006) + +**🔀 Refactored code** + +- Avalon repo removed from Jobs workflow [\#3193](https://github.com/pypeclub/OpenPype/pull/3193) +- General: 
Remove remaining imports from avalon [\#3130](https://github.com/pypeclub/OpenPype/pull/3130) +- General: Move mongo db logic and remove avalon repository [\#3066](https://github.com/pypeclub/OpenPype/pull/3066) +- General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009) + +**Merged pull requests:** + +- Harmony: message length in 21.1 [\#3257](https://github.com/pypeclub/OpenPype/pull/3257) +- Harmony: 21.1 fix [\#3249](https://github.com/pypeclub/OpenPype/pull/3249) +- Maya: added jpg to filter for Image Plane Loader [\#3223](https://github.com/pypeclub/OpenPype/pull/3223) +- Webpublisher: replace space by underscore in subset names [\#3160](https://github.com/pypeclub/OpenPype/pull/3160) +- StandalonePublisher: removed Extract Background plugins [\#3093](https://github.com/pypeclub/OpenPype/pull/3093) +- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078) +- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065) +- SiteSync: Download all workfile inputs [\#2966](https://github.com/pypeclub/OpenPype/pull/2966) +- Photoshop: New Publisher [\#2933](https://github.com/pypeclub/OpenPype/pull/2933) +- Bump pillow from 9.0.0 to 9.0.1 [\#2880](https://github.com/pypeclub/OpenPype/pull/2880) +- AfterEffects: Allow configuration of default variant via Settings [\#2856](https://github.com/pypeclub/OpenPype/pull/2856) ## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19) @@ -172,87 +778,1085 @@ ## [3.9.5](https://github.com/pypeclub/OpenPype/tree/3.9.5) (2022-04-25) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.2...3.9.5) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...3.9.5) ## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.4-nightly.2...3.9.4) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.3...3.9.4) + +### 📖 Documentation + +- Documentation: more info about Tasks [\#3062](https://github.com/pypeclub/OpenPype/pull/3062) +- Documentation: Python requirements to 3.7.9 [\#3035](https://github.com/pypeclub/OpenPype/pull/3035) +- Website Docs: Remove unused pages [\#2974](https://github.com/pypeclub/OpenPype/pull/2974) + +**🆕 New features** + +- General: Local overrides for environment variables [\#3045](https://github.com/pypeclub/OpenPype/pull/3045) +- Flame: Flare integration preparation [\#2928](https://github.com/pypeclub/OpenPype/pull/2928) + +**🚀 Enhancements** + +- TVPaint: Added init file for worker to trigger missing sound file dialog [\#3053](https://github.com/pypeclub/OpenPype/pull/3053) +- Ftrack: Custom attributes can be filled in slate values [\#3036](https://github.com/pypeclub/OpenPype/pull/3036) +- Resolve environment variable in google drive credential path [\#3008](https://github.com/pypeclub/OpenPype/pull/3008) + +**🐛 Bug fixes** + +- GitHub: Updated push-protected action in github workflow [\#3064](https://github.com/pypeclub/OpenPype/pull/3064) +- Nuke: Typos in imports from Nuke implementation [\#3061](https://github.com/pypeclub/OpenPype/pull/3061) +- Hotfix: fixing deadline job publishing [\#3059](https://github.com/pypeclub/OpenPype/pull/3059) +- General: Extract Review handle invalid characters for ffmpeg [\#3050](https://github.com/pypeclub/OpenPype/pull/3050) +- Slate Review: Support to keep format on slate concatenation
[\#3049](https://github.com/pypeclub/OpenPype/pull/3049) +- Webpublisher: fix processing of workfile [\#3048](https://github.com/pypeclub/OpenPype/pull/3048) +- Ftrack: Integrate ftrack api fix [\#3044](https://github.com/pypeclub/OpenPype/pull/3044) +- Webpublisher - removed wrong hardcoded family [\#3043](https://github.com/pypeclub/OpenPype/pull/3043) +- LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042) +- SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041) +- Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) +- SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) +- Maya: invalid review flag on rendered AOVs [\#2915](https://github.com/pypeclub/OpenPype/pull/2915) + +**Merged pull requests:** + +- Deadline: reworked pools assignment [\#3051](https://github.com/pypeclub/OpenPype/pull/3051) +- Houdini: Avoid ImportError on `hdefereval` when Houdini runs without UI [\#2987](https://github.com/pypeclub/OpenPype/pull/2987) ## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.3-nightly.2...3.9.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3) + +### 📖 Documentation + +- Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999) +- Website Docs: Manager Ftrack fix broken links [\#2979](https://github.com/pypeclub/OpenPype/pull/2979) +- Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951) +- Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896) + +**🆕 New features** + +- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) +- nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992) +- Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) +- Multiverse: Initial Support [\#2908](https://github.com/pypeclub/OpenPype/pull/2908) + +**🚀 Enhancements** + +- General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011) +- Ftrack: Add more options for note text of integrate ftrack note [\#3025](https://github.com/pypeclub/OpenPype/pull/3025) +- Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) +- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) +- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) +- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) +- TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) +- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) +- Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) +- General: `METADATA_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980) +- General: Tools with host filters 
[\#2975](https://github.com/pypeclub/OpenPype/pull/2975) +- Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967) +- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) +- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) +- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942) +- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) +- Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) +- General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923) +- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) +- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) +- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) + +**🐛 Bug fixes** + +- General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029) +- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) +- Settings UI: Version column can be extended so versions are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) +- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) +- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) +- AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023) +- General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022) +- Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017) +- Ftrack: multiple reviewable components [\#3012](https://github.com/pypeclub/OpenPype/pull/3012) +- Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010) +- Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004) +- Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) +- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998) +- Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) +- PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991) +- Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990) +- AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989) +- Maya: Don't do hardlinks on windows for look publishing [\#2986](https://github.com/pypeclub/OpenPype/pull/2986) +- Settings UI: Fix version completer on linux [\#2981](https://github.com/pypeclub/OpenPype/pull/2981) +- Photoshop: Fix creation
of subset names in PS review and workfile [\#2969](https://github.com/pypeclub/OpenPype/pull/2969) +- Slack: Added default for review\_upload\_limit for Slack [\#2965](https://github.com/pypeclub/OpenPype/pull/2965) +- General: OIIO conversion for ffmpeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958) +- Settings: Conditional dictionary avoids invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956) +- General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950) +- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) +- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948) +- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) +- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) +- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) +- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) +- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) +- Settings UI: Collapsed state of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) +- Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932) +- General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926) +- Hiero: Fix import of 'register\_event\_callback' [\#2924](https://github.com/pypeclub/OpenPype/pull/2924) +- Flame: centos related debugging [\#2922](https://github.com/pypeclub/OpenPype/pull/2922) +- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) + +**🔀 Refactored code** + +- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935) +- General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931) +- General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927) +- General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918) +- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) +- General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912) + +**Merged pull requests:** + +- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) +- Bump paramiko from 2.9.2 to 2.10.1 [\#2973](https://github.com/pypeclub/OpenPype/pull/2973) +- Bump minimist from 1.2.5 to 1.2.6 in /website [\#2954](https://github.com/pypeclub/OpenPype/pull/2954) +- Bump node-forge from 1.2.1 to 1.3.0 in /website [\#2953](https://github.com/pypeclub/OpenPype/pull/2953) +- Maya - added transparency into review creator [\#2952](https://github.com/pypeclub/OpenPype/pull/2952) ## [3.9.2](https://github.com/pypeclub/OpenPype/tree/3.9.2) (2022-04-04) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.2-nightly.4...3.9.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...3.9.2) ##
[3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.1-nightly.3...3.9.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) + +**🚀 Enhancements** + +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) +- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) +- Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) +- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) + +**🐛 Bug fixes** + +- General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) +- Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) +- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) +- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) +- Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) +- General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) +- General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) +- Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) +- Flame: Babypublisher optimization [\#2806](https://github.com/pypeclub/OpenPype/pull/2806) +- hotfix: OIIO tool path - add extension on windows [\#2618](https://github.com/pypeclub/OpenPype/pull/2618) + +**🔀 Refactored code** + +- General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) +- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.0-nightly.9...3.9.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.2...3.9.0) + +**Deprecated:** + +- Houdini: Remove unused code [\#2779](https://github.com/pypeclub/OpenPype/pull/2779) +- Loader: Remove default family states for hosts from code [\#2706](https://github.com/pypeclub/OpenPype/pull/2706) +- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845) + +### 📖 Documentation + +- Documentation: fixed broken links [\#2799](https://github.com/pypeclub/OpenPype/pull/2799) +- Documentation: broken link fix [\#2785](https://github.com/pypeclub/OpenPype/pull/2785) +- Documentation: link fixes [\#2772](https://github.com/pypeclub/OpenPype/pull/2772) +- Update docusaurus to latest version [\#2760](https://github.com/pypeclub/OpenPype/pull/2760) +- Various testing updates [\#2726](https://github.com/pypeclub/OpenPype/pull/2726) +- documentation: add example to `repack-version` command [\#2669](https://github.com/pypeclub/OpenPype/pull/2669) +- Update docusaurus [\#2639](https://github.com/pypeclub/OpenPype/pull/2639) +- Documentation: Fixed relative links [\#2621](https://github.com/pypeclub/OpenPype/pull/2621) +-
Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) + +**🆕 New features** + +- Flame: loading clips to reels [\#2622](https://github.com/pypeclub/OpenPype/pull/2622) +- General: Store settings by OpenPype version [\#2570](https://github.com/pypeclub/OpenPype/pull/2570) + +**🚀 Enhancements** + +- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841) +- General: Set context environments for non host applications [\#2803](https://github.com/pypeclub/OpenPype/pull/2803) +- Houdini: Remove duplicate ValidateOutputNode plug-in [\#2780](https://github.com/pypeclub/OpenPype/pull/2780) +- Tray publisher: New Tray Publisher host \(beta\) [\#2778](https://github.com/pypeclub/OpenPype/pull/2778) +- Slack: Added regex for filtering on subset names [\#2775](https://github.com/pypeclub/OpenPype/pull/2775) +- Houdini: Implement Reset Frame Range [\#2770](https://github.com/pypeclub/OpenPype/pull/2770) +- Pyblish Pype: Remove redundant new line in installed fonts printing [\#2758](https://github.com/pypeclub/OpenPype/pull/2758) +- Flame: use Shot Name on segment for asset name [\#2751](https://github.com/pypeclub/OpenPype/pull/2751) +- Flame: adding validator source clip [\#2746](https://github.com/pypeclub/OpenPype/pull/2746) +- Work Files: Preserve subversion comment of current filename by default [\#2734](https://github.com/pypeclub/OpenPype/pull/2734) +- Maya: set Deadline job/batch name to original source workfile name instead of published workfile [\#2733](https://github.com/pypeclub/OpenPype/pull/2733) +- Ftrack: Disable ftrack module by default [\#2732](https://github.com/pypeclub/OpenPype/pull/2732) +- Project Manager: Disable add task, add asset and save button when not in a project [\#2727](https://github.com/pypeclub/OpenPype/pull/2727) +- dropbox handle big file [\#2718](https://github.com/pypeclub/OpenPype/pull/2718) +- Fusion Move PR: Minor tweaks to Fusion integration [\#2716](https://github.com/pypeclub/OpenPype/pull/2716) +- RoyalRender: Minor enhancements [\#2700](https://github.com/pypeclub/OpenPype/pull/2700) +- Nuke: prerender with review knob [\#2691](https://github.com/pypeclub/OpenPype/pull/2691) +- Maya configurable unit validator [\#2680](https://github.com/pypeclub/OpenPype/pull/2680) +- General: Add settings for CleanUpFarm and disable the plugin by default [\#2679](https://github.com/pypeclub/OpenPype/pull/2679) +- Project Manager: Only allow scroll wheel edits when spinbox is active [\#2678](https://github.com/pypeclub/OpenPype/pull/2678) +- Ftrack: Sync description to assets [\#2670](https://github.com/pypeclub/OpenPype/pull/2670) +- Houdini: Moved to OpenPype [\#2658](https://github.com/pypeclub/OpenPype/pull/2658) +- Maya: Move implementation to OpenPype [\#2649](https://github.com/pypeclub/OpenPype/pull/2649) +- General: FFmpeg conversion also check attribute string length [\#2635](https://github.com/pypeclub/OpenPype/pull/2635) +- Houdini: Load Arnold .ass procedurals into Houdini [\#2606](https://github.com/pypeclub/OpenPype/pull/2606) +- Deadline: Simplify GlobalJobPreLoad logic [\#2605](https://github.com/pypeclub/OpenPype/pull/2605) +- Houdini: Implement Arnold .ass standin extraction from Houdini \(also support .ass.gz\) [\#2603](https://github.com/pypeclub/OpenPype/pull/2603) +- New Publisher: New features and preparations for new standalone publisher [\#2556](https://github.com/pypeclub/OpenPype/pull/2556) +- Fix Maya 2022 Python 3 compatibility 
[\#2445](https://github.com/pypeclub/OpenPype/pull/2445) +- TVPaint: Use new publisher exceptions in validators [\#2435](https://github.com/pypeclub/OpenPype/pull/2435) +- Harmony: Added new style validations for New Publisher [\#2434](https://github.com/pypeclub/OpenPype/pull/2434) +- Aftereffects: New style validations for New publisher [\#2430](https://github.com/pypeclub/OpenPype/pull/2430) +- Farm publishing: New cleanup plugin for Maya renders on farm [\#2390](https://github.com/pypeclub/OpenPype/pull/2390) +- General: Subset name filtering in ExtractReview outputs [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) +- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867) +- NewPublisher: Changing task on publishing instance [\#2863](https://github.com/pypeclub/OpenPype/pull/2863) +- TrayPublisher: Choose project widget is clearer [\#2859](https://github.com/pypeclub/OpenPype/pull/2859) +- Maya: add loaded containers to published instance [\#2837](https://github.com/pypeclub/OpenPype/pull/2837) +- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836) +- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822) +- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817) +- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812) +- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811) +- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805) +- Houdini: Move Houdini Save Current File to beginning of ExtractorOrder [\#2747](https://github.com/pypeclub/OpenPype/pull/2747) +- Global: adding studio name/code to anatomy template formatting data [\#2630](https://github.com/pypeclub/OpenPype/pull/2630) + +**🐛 Bug fixes** + +- Settings UI: Search case sensitivity [\#2810](https://github.com/pypeclub/OpenPype/pull/2810) +- resolve: fixing fusion module loading [\#2802](https://github.com/pypeclub/OpenPype/pull/2802) +- Ftrack: Unset task ids from asset versions before tasks are removed [\#2800](https://github.com/pypeclub/OpenPype/pull/2800) +- Slack: fail gracefully if slack exception [\#2798](https://github.com/pypeclub/OpenPype/pull/2798) +- Flame: Fix version string in default settings [\#2783](https://github.com/pypeclub/OpenPype/pull/2783) +- After Effects: Fix typo in name `afftereffects` -\> `aftereffects` [\#2768](https://github.com/pypeclub/OpenPype/pull/2768) +- Houdini: Fix open last workfile [\#2767](https://github.com/pypeclub/OpenPype/pull/2767) +- Avoid renaming udim indexes [\#2765](https://github.com/pypeclub/OpenPype/pull/2765) +- Maya: Fix `unique_namespace` when in a namespace that is empty [\#2759](https://github.com/pypeclub/OpenPype/pull/2759) +- Loader UI: Fix right click in representation widget [\#2757](https://github.com/pypeclub/OpenPype/pull/2757) +- Harmony: Rendering in Deadline didn't work on machines other than the submitter [\#2754](https://github.com/pypeclub/OpenPype/pull/2754) +- Aftereffects 2022 and Deadline [\#2748](https://github.com/pypeclub/OpenPype/pull/2748) +- Flame: bunch of bug fixes [\#2745](https://github.com/pypeclub/OpenPype/pull/2745) +- Maya: Save current scene on workfile publish [\#2744](https://github.com/pypeclub/OpenPype/pull/2744) +- Version Up: Preserve parts of filename after version number \(like subversion\) on version\_up
[\#2741](https://github.com/pypeclub/OpenPype/pull/2741) +- Loader UI: Multiple asset selection and underline colors fixed [\#2731](https://github.com/pypeclub/OpenPype/pull/2731) +- General: Fix loading of unused chars in xml format [\#2729](https://github.com/pypeclub/OpenPype/pull/2729) +- TVPaint: Set objectName with members [\#2725](https://github.com/pypeclub/OpenPype/pull/2725) +- General: Don't use 'objectName' from loaded references [\#2715](https://github.com/pypeclub/OpenPype/pull/2715) +- Settings: Studio Project anatomy is queried using right keys [\#2711](https://github.com/pypeclub/OpenPype/pull/2711) +- Local Settings: Additional applications don't break UI [\#2710](https://github.com/pypeclub/OpenPype/pull/2710) +- Maya: Remove some unused code [\#2709](https://github.com/pypeclub/OpenPype/pull/2709) +- Houdini: Fix refactor of Houdini host move for CreateArnoldAss [\#2704](https://github.com/pypeclub/OpenPype/pull/2704) +- LookAssigner: Fix imports after moving code to OpenPype repository [\#2701](https://github.com/pypeclub/OpenPype/pull/2701) +- Multiple hosts: unify menu style across hosts [\#2693](https://github.com/pypeclub/OpenPype/pull/2693) +- Maya Redshift fixes [\#2692](https://github.com/pypeclub/OpenPype/pull/2692) +- Maya: fix fps validation popup [\#2685](https://github.com/pypeclub/OpenPype/pull/2685) +- Houdini: Explicitly collect correct frame name even in case of single frame render when `frameStart` is provided [\#2676](https://github.com/pypeclub/OpenPype/pull/2676) +- hiero: fix effect collector name and order [\#2673](https://github.com/pypeclub/OpenPype/pull/2673) +- Maya: Fix menu callbacks [\#2671](https://github.com/pypeclub/OpenPype/pull/2671) +- hiero: removing obsolete unsupported plugin [\#2667](https://github.com/pypeclub/OpenPype/pull/2667) +- Launcher: Fix access to 'data' attribute on actions [\#2659](https://github.com/pypeclub/OpenPype/pull/2659) +- Maya `vrscene` loader fixes [\#2633](https://github.com/pypeclub/OpenPype/pull/2633) +- Houdini: fix usd family in loader and integrators [\#2631](https://github.com/pypeclub/OpenPype/pull/2631) +- Maya: Add only reference node to look family container like with other families [\#2508](https://github.com/pypeclub/OpenPype/pull/2508) +- General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877) +- Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868) +- Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866) +- General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864) +- General: ffmpeg was crashing on slate merge [\#2860](https://github.com/pypeclub/OpenPype/pull/2860) +- WebPublisher: Video file was published with one frame too many [\#2858](https://github.com/pypeclub/OpenPype/pull/2858) +- New Publisher: Error dialog got the right styles [\#2857](https://github.com/pypeclub/OpenPype/pull/2857) +- General: Fix getattr callback on dynamic modules [\#2855](https://github.com/pypeclub/OpenPype/pull/2855) +- Nuke: slate resolution to input video resolution [\#2853](https://github.com/pypeclub/OpenPype/pull/2853) +- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852) +- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851) +- Nuke: Fix family test in validate\_write\_legacy to work with stillImage [\#2847](https://github.com/pypeclub/OpenPype/pull/2847) +- Nuke:
fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842) +- Blender: Fixed parameters for FBX export of the camera [\#2840](https://github.com/pypeclub/OpenPype/pull/2840) +- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) +- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) +- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) +- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) +- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) +- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) +- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820) +- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819) +- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818) +- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816) + +**🔀 Refactored code** + +- Ftrack: Moved module one hierarchy level higher [\#2792](https://github.com/pypeclub/OpenPype/pull/2792) +- SyncServer: Moved module one hierarchy level higher [\#2791](https://github.com/pypeclub/OpenPype/pull/2791) +- Royal render: Move module one hierarchy level higher [\#2790](https://github.com/pypeclub/OpenPype/pull/2790) +- Deadline: Move module one hierarchy level higher [\#2789](https://github.com/pypeclub/OpenPype/pull/2789) +- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876) +- General: Move create logic from avalon to OpenPype [\#2854](https://github.com/pypeclub/OpenPype/pull/2854) +- General: Add vendors from avalon [\#2848](https://github.com/pypeclub/OpenPype/pull/2848) +- General: Basic event system [\#2846](https://github.com/pypeclub/OpenPype/pull/2846) +- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839) +- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829) +- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823) +- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766) + +**Merged pull requests:** + +- Fusion: Moved implementation into OpenPype [\#2713](https://github.com/pypeclub/OpenPype/pull/2713) +- TVPaint: Plugin build without dependencies [\#2705](https://github.com/pypeclub/OpenPype/pull/2705) +- Webpublisher: Photoshop create a beauty png [\#2689](https://github.com/pypeclub/OpenPype/pull/2689) +- Ftrack: Hierarchical attributes are queried properly [\#2682](https://github.com/pypeclub/OpenPype/pull/2682) +- Maya: Add Validate Frame Range settings [\#2661](https://github.com/pypeclub/OpenPype/pull/2661) +- Harmony: move to Openpype [\#2657](https://github.com/pypeclub/OpenPype/pull/2657) +- Maya: cleanup duplicate rendersetup code [\#2642](https://github.com/pypeclub/OpenPype/pull/2642) +- Deadline: Be able to pass Mongo url to job [\#2616](https://github.com/pypeclub/OpenPype/pull/2616) ## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) -[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.2-nightly.3...3.8.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.1...3.8.2) + +### 📖 Documentation + +- Cosmetics: Fix common typos in openpype/website [\#2617](https://github.com/pypeclub/OpenPype/pull/2617) + +**🚀 Enhancements** + +- TVPaint: Image loaders also work on review family [\#2638](https://github.com/pypeclub/OpenPype/pull/2638) +- General: Project backup tools [\#2629](https://github.com/pypeclub/OpenPype/pull/2629) +- nuke: adding clear button to write nodes [\#2627](https://github.com/pypeclub/OpenPype/pull/2627) +- Ftrack: Family to Asset type mapping is in settings [\#2602](https://github.com/pypeclub/OpenPype/pull/2602) +- Nuke: load color space from representation data [\#2576](https://github.com/pypeclub/OpenPype/pull/2576) + +**🐛 Bug fixes** + +- Fix pulling of cx\_freeze 6.10 [\#2628](https://github.com/pypeclub/OpenPype/pull/2628) +- Global: fix broken otio review extractor [\#2590](https://github.com/pypeclub/OpenPype/pull/2590) + +**Merged pull requests:** + +- WebPublisher: fix instance duplicates [\#2641](https://github.com/pypeclub/OpenPype/pull/2641) +- Fix - safer pulling of task name for webpublishing from PS [\#2613](https://github.com/pypeclub/OpenPype/pull/2613) ## [3.8.1](https://github.com/pypeclub/OpenPype/tree/3.8.1) (2022-02-01) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.1-nightly.3...3.8.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.0...3.8.1) + +**🚀 Enhancements** + +- Webpublisher: Thumbnail extractor [\#2600](https://github.com/pypeclub/OpenPype/pull/2600) +- Loader: Allow to toggle default family filters between "include" or "exclude" filtering [\#2541](https://github.com/pypeclub/OpenPype/pull/2541) +- Launcher: Added context menu to skip opening last workfile [\#2536](https://github.com/pypeclub/OpenPype/pull/2536) +- Unreal: JSON Layout Loading support [\#2066](https://github.com/pypeclub/OpenPype/pull/2066) + +**🐛 Bug fixes** + +- Release/3.8.0 [\#2619](https://github.com/pypeclub/OpenPype/pull/2619) +- Settings: Enum does not store empty string if it has a single item to select [\#2615](https://github.com/pypeclub/OpenPype/pull/2615) +- switch distutils to sysconfig for `get_platform()` [\#2594](https://github.com/pypeclub/OpenPype/pull/2594) +- Fix poetry index and speedcopy update [\#2589](https://github.com/pypeclub/OpenPype/pull/2589) +- Webpublisher: Fix - subset names from processed .psd used wrong value for task [\#2586](https://github.com/pypeclub/OpenPype/pull/2586) +- `vrscene` creator Deadline webservice URL handling [\#2580](https://github.com/pypeclub/OpenPype/pull/2580) +- global: track name was failing if duplicated root word in name [\#2568](https://github.com/pypeclub/OpenPype/pull/2568) +- Validate Maya Rig produces no cycle errors [\#2484](https://github.com/pypeclub/OpenPype/pull/2484) + +**Merged pull requests:** + +- Bump pillow from 8.4.0 to 9.0.0 [\#2595](https://github.com/pypeclub/OpenPype/pull/2595) +- Webpublisher: Skip version collect [\#2591](https://github.com/pypeclub/OpenPype/pull/2591) +- build\(deps\): bump pillow from 8.4.0 to 9.0.0 [\#2523](https://github.com/pypeclub/OpenPype/pull/2523) ## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.0-nightly.7...3.8.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...3.8.0) + +### 📖 Documentation + +- Variable in docs
renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) + +**🆕 New features** + +- Flame: extracting segments with transcoding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547) +- Maya: V-Ray Proxy - load all ABC files via proxy [\#2544](https://github.com/pypeclub/OpenPype/pull/2544) +- Maya to Unreal: Extended static mesh workflow [\#2537](https://github.com/pypeclub/OpenPype/pull/2537) +- Flame: collecting publishable instances [\#2519](https://github.com/pypeclub/OpenPype/pull/2519) +- Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) +- Flame: OpenTimelineIO Export Module [\#2398](https://github.com/pypeclub/OpenPype/pull/2398) + +**🚀 Enhancements** + +- Webpublisher: Moved error to the beginning of the log [\#2559](https://github.com/pypeclub/OpenPype/pull/2559) +- Ftrack: Use ApplicationManager to get DJV path [\#2558](https://github.com/pypeclub/OpenPype/pull/2558) +- Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555) +- Settings: PathInput strips passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550) +- Global: Extract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548) +- Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542) +- General: Validate if current process OpenPype version is the requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529) +- General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525) +- Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521) +- Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510) +- TimersManager: Move module one hierarchy level higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501) +- Slack: notifications are sent with OpenPype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499) +- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498) +- Ftrack: Event handlers settings [\#2496](https://github.com/pypeclub/OpenPype/pull/2496) +- Tools: Fix style and modality of errors in loader and creator [\#2489](https://github.com/pypeclub/OpenPype/pull/2489) +- Maya: Collect 'fps' animation data only for "review" instances [\#2486](https://github.com/pypeclub/OpenPype/pull/2486) +- Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482) +- Tools: Be able to change models of tasks and assets widgets [\#2475](https://github.com/pypeclub/OpenPype/pull/2475) +- Publish pype: Reduce publish process deferring [\#2464](https://github.com/pypeclub/OpenPype/pull/2464) +- Maya: Improve speed of Collect History logic [\#2460](https://github.com/pypeclub/OpenPype/pull/2460) +- Maya: Validate Rig Controllers - fix error in script editor [\#2459](https://github.com/pypeclub/OpenPype/pull/2459) +- Maya: Validate NGONs simplify and speed-up [\#2458](https://github.com/pypeclub/OpenPype/pull/2458) +- Maya: Optimize Validate Locked Normals speed for dense polymeshes [\#2457](https://github.com/pypeclub/OpenPype/pull/2457) +- Maya: Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455) +- Houdini: Remove broken unique name counter
[\#2450](https://github.com/pypeclub/OpenPype/pull/2450) +- Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447) +- General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425) +- Maya : add option to not group reference in ReferenceLoader [\#2383](https://github.com/pypeclub/OpenPype/pull/2383) + +**🐛 Bug fixes** + +- AfterEffects: Fix - removed obsolete import [\#2577](https://github.com/pypeclub/OpenPype/pull/2577) +- General: OpenPype version updates [\#2575](https://github.com/pypeclub/OpenPype/pull/2575) +- Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563) +- Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560) +- General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557) +- Webpublisher: Fixed progress reporting [\#2553](https://github.com/pypeclub/OpenPype/pull/2553) +- Fix Maya AssProxyLoader version switch [\#2551](https://github.com/pypeclub/OpenPype/pull/2551) +- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) +- Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535) +- Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533) +- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532) +- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) +- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) +- Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518) +- Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513) +- Maya: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506) +- Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) +- General: Settings work if OpenPypeVersion is available [\#2494](https://github.com/pypeclub/OpenPype/pull/2494) +- General: PYTHONPATH may break OpenPype dependencies [\#2493](https://github.com/pypeclub/OpenPype/pull/2493) +- General: Modules import function output fix [\#2492](https://github.com/pypeclub/OpenPype/pull/2492) +- AE: fix hiding of alert window below Publish [\#2491](https://github.com/pypeclub/OpenPype/pull/2491) +- Workfiles tool: Files widget show files on first show [\#2488](https://github.com/pypeclub/OpenPype/pull/2488) +- General: Custom template paths filter fix [\#2483](https://github.com/pypeclub/OpenPype/pull/2483) +- Loader: Remove always on top flag in tray [\#2480](https://github.com/pypeclub/OpenPype/pull/2480) +- General: Anatomy does not return root envs as unicode [\#2465](https://github.com/pypeclub/OpenPype/pull/2465) +- Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) + +**Merged pull requests:** + +- AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) +- Maya: Remove Maya Look Assigner check on 
startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540) +- build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538) +- build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534) +- Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514) +- Maya: Vray fix proxies look assignment [\#2392](https://github.com/pypeclub/OpenPype/pull/2392) +- Bump algoliasearch-helper from 3.4.4 to 3.6.2 in /website [\#2297](https://github.com/pypeclub/OpenPype/pull/2297) ## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.7.0-nightly.14...3.7.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.4...3.7.0) + +**Deprecated:** + +- General: Default modules hierarchy n2 [\#2368](https://github.com/pypeclub/OpenPype/pull/2368) + +### 📖 Documentation + +- docs\[website\]: Add Ellipse Studio \(logo\) as an OpenPype contributor [\#2324](https://github.com/pypeclub/OpenPype/pull/2324) + +**🆕 New features** + +- Settings UI use OpenPype styles [\#2296](https://github.com/pypeclub/OpenPype/pull/2296) +- Store typed version dependencies for workfiles [\#2192](https://github.com/pypeclub/OpenPype/pull/2192) +- OpenPypeV3: add key task type, task shortname and user to path templating construction [\#2157](https://github.com/pypeclub/OpenPype/pull/2157) +- Nuke: Alembic model workflow [\#2140](https://github.com/pypeclub/OpenPype/pull/2140) +- TVPaint: Load workfile from published. [\#1980](https://github.com/pypeclub/OpenPype/pull/1980) + +**🚀 Enhancements** + +- General: Workdir extra folders [\#2462](https://github.com/pypeclub/OpenPype/pull/2462) +- Photoshop: New style validations for New publisher [\#2429](https://github.com/pypeclub/OpenPype/pull/2429) +- General: Environment variables groups [\#2424](https://github.com/pypeclub/OpenPype/pull/2424) +- Unreal: Dynamic menu created in Python [\#2422](https://github.com/pypeclub/OpenPype/pull/2422) +- Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420) +- Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) +- TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) +- General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408) +- Ftrack: Check existence of object type on recreation [\#2404](https://github.com/pypeclub/OpenPype/pull/2404) +- Enhancement: Global cleanup plugin that explicitly remove paths from context [\#2402](https://github.com/pypeclub/OpenPype/pull/2402) +- General: MongoDB ability to specify replica set groups [\#2401](https://github.com/pypeclub/OpenPype/pull/2401) +- Flame: moving `utility_scripts` to api folder also with `scripts` [\#2385](https://github.com/pypeclub/OpenPype/pull/2385) +- Centos 7 dependency compatibility [\#2384](https://github.com/pypeclub/OpenPype/pull/2384) +- Enhancement: Settings: Use project settings values from another project [\#2382](https://github.com/pypeclub/OpenPype/pull/2382) +- Blender 3: Support auto install for new blender version [\#2377](https://github.com/pypeclub/OpenPype/pull/2377) +- Maya add render image path to settings [\#2375](https://github.com/pypeclub/OpenPype/pull/2375) +- Settings: Webpublisher in hosts enum 
[\#2367](https://github.com/pypeclub/OpenPype/pull/2367) +- Hiero: python3 compatibility [\#2365](https://github.com/pypeclub/OpenPype/pull/2365) +- Burnins: Be able recognize mxf OPAtom format [\#2361](https://github.com/pypeclub/OpenPype/pull/2361) +- Maya: Add is\_static\_image\_plane and is\_in\_all\_views option in imagePlaneLoader [\#2356](https://github.com/pypeclub/OpenPype/pull/2356) +- Local settings: Copyable studio paths [\#2349](https://github.com/pypeclub/OpenPype/pull/2349) +- Assets Widget: Clear model on project change [\#2345](https://github.com/pypeclub/OpenPype/pull/2345) +- General: OpenPype default modules hierarchy [\#2338](https://github.com/pypeclub/OpenPype/pull/2338) +- TVPaint: Move implementation to OpenPype [\#2336](https://github.com/pypeclub/OpenPype/pull/2336) +- General: FFprobe error exception contain original error message [\#2328](https://github.com/pypeclub/OpenPype/pull/2328) +- Resolve: Add experimental button to menu [\#2325](https://github.com/pypeclub/OpenPype/pull/2325) +- Hiero: Add experimental tools action [\#2323](https://github.com/pypeclub/OpenPype/pull/2323) +- Input links: Cleanup and unification of differences [\#2322](https://github.com/pypeclub/OpenPype/pull/2322) +- General: Don't validate vendor bin with executing them [\#2317](https://github.com/pypeclub/OpenPype/pull/2317) +- General: Multilayer EXRs support [\#2315](https://github.com/pypeclub/OpenPype/pull/2315) +- General: Run process log stderr as info log level [\#2309](https://github.com/pypeclub/OpenPype/pull/2309) +- General: Reduce vendor imports [\#2305](https://github.com/pypeclub/OpenPype/pull/2305) +- Tools: Cleanup of unused classes [\#2304](https://github.com/pypeclub/OpenPype/pull/2304) +- Project Manager: Added ability to delete project [\#2298](https://github.com/pypeclub/OpenPype/pull/2298) +- Ftrack: Synchronize input links [\#2287](https://github.com/pypeclub/OpenPype/pull/2287) +- StandalonePublisher: Remove unused plugin ExtractHarmonyZip [\#2277](https://github.com/pypeclub/OpenPype/pull/2277) +- Ftrack: Support multiple reviews [\#2271](https://github.com/pypeclub/OpenPype/pull/2271) +- Ftrack: Remove unused clean component plugin [\#2269](https://github.com/pypeclub/OpenPype/pull/2269) +- Royal Render: Support for rr channels in separate dirs [\#2268](https://github.com/pypeclub/OpenPype/pull/2268) +- Houdini: Add experimental tools action [\#2267](https://github.com/pypeclub/OpenPype/pull/2267) +- Nuke: extract baked review videos presets [\#2248](https://github.com/pypeclub/OpenPype/pull/2248) +- TVPaint: Workers rendering [\#2209](https://github.com/pypeclub/OpenPype/pull/2209) +- OpenPypeV3: Add key parent asset to path templating construction [\#2186](https://github.com/pypeclub/OpenPype/pull/2186) + +**🐛 Bug fixes** + +- TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471) +- Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) +- PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) +- Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416) +- AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412) +- Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406) +- General: Fix access to environments from default settings 
[\#2403](https://github.com/pypeclub/OpenPype/pull/2403) +- Fix: Placeholder Input color set fix [\#2399](https://github.com/pypeclub/OpenPype/pull/2399) +- Settings: Fix state change of wrapper label [\#2396](https://github.com/pypeclub/OpenPype/pull/2396) +- Flame: fix ftrack publisher [\#2381](https://github.com/pypeclub/OpenPype/pull/2381) +- hiero: solve custom ocio path [\#2379](https://github.com/pypeclub/OpenPype/pull/2379) +- hiero: fix workio and flatten [\#2378](https://github.com/pypeclub/OpenPype/pull/2378) +- Nuke: fixing menu re-drawing during context change [\#2374](https://github.com/pypeclub/OpenPype/pull/2374) +- Webpublisher: Fix assignment of families of TVpaint instances [\#2373](https://github.com/pypeclub/OpenPype/pull/2373) +- Nuke: fixing node name based on switched asset name [\#2369](https://github.com/pypeclub/OpenPype/pull/2369) +- JobQueue: Fix loading of settings [\#2362](https://github.com/pypeclub/OpenPype/pull/2362) +- Tools: Placeholder color [\#2359](https://github.com/pypeclub/OpenPype/pull/2359) +- Launcher: Minimize button on MacOs [\#2355](https://github.com/pypeclub/OpenPype/pull/2355) +- StandalonePublisher: Fix import of constant [\#2354](https://github.com/pypeclub/OpenPype/pull/2354) +- Houdini: Fix HDA creation [\#2350](https://github.com/pypeclub/OpenPype/pull/2350) +- Adobe products show issue [\#2347](https://github.com/pypeclub/OpenPype/pull/2347) +- Maya Look Assigner: Fix Python 3 compatibility [\#2343](https://github.com/pypeclub/OpenPype/pull/2343) +- Remove wrongly used host for hook [\#2342](https://github.com/pypeclub/OpenPype/pull/2342) +- Tools: Use Qt context on tools show [\#2340](https://github.com/pypeclub/OpenPype/pull/2340) +- Flame: Fix default argument value in custom dictionary [\#2339](https://github.com/pypeclub/OpenPype/pull/2339) +- Timers Manager: Disable auto stop timer on linux platform [\#2334](https://github.com/pypeclub/OpenPype/pull/2334) +- nuke: bake preset single input exception [\#2331](https://github.com/pypeclub/OpenPype/pull/2331) +- Hiero: fixing multiple templates at a hierarchy parent [\#2330](https://github.com/pypeclub/OpenPype/pull/2330) +- Fix - provider icons are pulled from a folder [\#2326](https://github.com/pypeclub/OpenPype/pull/2326) +- InputLinks: Typo in "inputLinks" key [\#2314](https://github.com/pypeclub/OpenPype/pull/2314) +- Deadline timeout and logging [\#2312](https://github.com/pypeclub/OpenPype/pull/2312) +- nuke: do not multiply representation on class method [\#2311](https://github.com/pypeclub/OpenPype/pull/2311) +- Workfiles tool: Fix task formatting [\#2306](https://github.com/pypeclub/OpenPype/pull/2306) +- Delivery: Fix delivery paths created on windows [\#2302](https://github.com/pypeclub/OpenPype/pull/2302) +- Maya: Deadline - fix limit groups [\#2295](https://github.com/pypeclub/OpenPype/pull/2295) +- Royal Render: Fix plugin order and OpenPype auto-detection [\#2291](https://github.com/pypeclub/OpenPype/pull/2291) +- New Publisher: Fix mapping of indexes [\#2285](https://github.com/pypeclub/OpenPype/pull/2285) +- Alternate site for site sync doesn't work for sequences [\#2284](https://github.com/pypeclub/OpenPype/pull/2284) +- FFmpeg: Execute ffprobe using list of arguments instead of string command [\#2281](https://github.com/pypeclub/OpenPype/pull/2281) +- Nuke: Anatomy fill data use task as dictionary [\#2278](https://github.com/pypeclub/OpenPype/pull/2278) +- Bug: fix variable name \_asset\_id in workfiles application
[\#2274](https://github.com/pypeclub/OpenPype/pull/2274) +- Version handling fixes [\#2272](https://github.com/pypeclub/OpenPype/pull/2272) + +**Merged pull requests:** + +- Maya: Replaced PATH usage with vendored oiio path for maketx utility [\#2405](https://github.com/pypeclub/OpenPype/pull/2405) +- \[Fix\]\[MAYA\] Handle message type attribute within CollectLook [\#2394](https://github.com/pypeclub/OpenPype/pull/2394) +- Add validator to check correct version of extension for PS and AE [\#2387](https://github.com/pypeclub/OpenPype/pull/2387) +- Maya: configurable model top level validation [\#2321](https://github.com/pypeclub/OpenPype/pull/2321) +- Create test publish class for After Effects [\#2270](https://github.com/pypeclub/OpenPype/pull/2270) ## [3.6.4](https://github.com/pypeclub/OpenPype/tree/3.6.4) (2021-11-23) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.7.0-nightly.1...3.6.4) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.3...3.6.4) + +**🐛 Bug fixes** + +- Nuke: inventory update removes all loaded read nodes [\#2294](https://github.com/pypeclub/OpenPype/pull/2294) ## [3.6.3](https://github.com/pypeclub/OpenPype/tree/3.6.3) (2021-11-19) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.3-nightly.1...3.6.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.2...3.6.3) + +**🐛 Bug fixes** + +- Deadline: Fix publish targets [\#2280](https://github.com/pypeclub/OpenPype/pull/2280) ## [3.6.2](https://github.com/pypeclub/OpenPype/tree/3.6.2) (2021-11-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.2-nightly.2...3.6.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.1...3.6.2) + +**🚀 Enhancements** + +- Tools: Assets widget [\#2265](https://github.com/pypeclub/OpenPype/pull/2265) +- SceneInventory: Choose loader in asset switcher [\#2262](https://github.com/pypeclub/OpenPype/pull/2262) +- Style: New fonts in OpenPype style [\#2256](https://github.com/pypeclub/OpenPype/pull/2256) +- Tools: SceneInventory in OpenPype [\#2255](https://github.com/pypeclub/OpenPype/pull/2255) +- Tools: Tasks widget [\#2251](https://github.com/pypeclub/OpenPype/pull/2251) +- Tools: Creator in OpenPype [\#2244](https://github.com/pypeclub/OpenPype/pull/2244) +- Added endpoint for configured extensions [\#2221](https://github.com/pypeclub/OpenPype/pull/2221) + +**🐛 Bug fixes** + +- Tools: Parenting of tools in Nuke and Hiero [\#2266](https://github.com/pypeclub/OpenPype/pull/2266) +- limiting validator to specific editorial hosts [\#2264](https://github.com/pypeclub/OpenPype/pull/2264) +- Tools: Select Context dialog attribute fix [\#2261](https://github.com/pypeclub/OpenPype/pull/2261) +- Maya: Render publishing fails on linux [\#2260](https://github.com/pypeclub/OpenPype/pull/2260) +- LookAssigner: Fix tool reopen [\#2259](https://github.com/pypeclub/OpenPype/pull/2259) +- Standalone: editorial not publishing thumbnails on all subsets [\#2258](https://github.com/pypeclub/OpenPype/pull/2258) +- Burnins: Support mxf metadata [\#2247](https://github.com/pypeclub/OpenPype/pull/2247) +- Maya: Support for configurable AOV separator characters [\#2197](https://github.com/pypeclub/OpenPype/pull/2197) +- Maya: texture colorspace modes in looks [\#2195](https://github.com/pypeclub/OpenPype/pull/2195) ## [3.6.1](https://github.com/pypeclub/OpenPype/tree/3.6.1) (2021-11-16) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.1-nightly.1...3.6.1) +[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.0...3.6.1) + +**🐛 Bug fixes** + +- Loader doesn't allow changing of version before loading [\#2254](https://github.com/pypeclub/OpenPype/pull/2254) ## [3.6.0](https://github.com/pypeclub/OpenPype/tree/3.6.0) (2021-11-15) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.0-nightly.6...3.6.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.5.0...3.6.0) + +### 📖 Documentation + +- Add alternative sites for Site Sync [\#2206](https://github.com/pypeclub/OpenPype/pull/2206) +- Add command line way of running site sync server [\#2188](https://github.com/pypeclub/OpenPype/pull/2188) + +**🆕 New features** + +- Add validate active site button to sync queue on a project [\#2176](https://github.com/pypeclub/OpenPype/pull/2176) +- Maya : Colorspace configuration [\#2170](https://github.com/pypeclub/OpenPype/pull/2170) +- Blender: Added support for audio [\#2168](https://github.com/pypeclub/OpenPype/pull/2168) +- Flame: a host basic integration [\#2165](https://github.com/pypeclub/OpenPype/pull/2165) +- Houdini: simple HDA workflow [\#2072](https://github.com/pypeclub/OpenPype/pull/2072) +- Basic Royal Render Integration ✨ [\#2061](https://github.com/pypeclub/OpenPype/pull/2061) +- Camera handling between Blender and Unreal [\#1988](https://github.com/pypeclub/OpenPype/pull/1988) +- switch PyQt5 for PySide2 [\#1744](https://github.com/pypeclub/OpenPype/pull/1744) + +**🚀 Enhancements** + +- Tools: Subset manager in OpenPype [\#2243](https://github.com/pypeclub/OpenPype/pull/2243) +- General: Skip module directories without init file [\#2239](https://github.com/pypeclub/OpenPype/pull/2239) +- General: Static interfaces [\#2238](https://github.com/pypeclub/OpenPype/pull/2238) +- Style: Fix transparent image in style [\#2235](https://github.com/pypeclub/OpenPype/pull/2235) +- Add a "following workfile versioning" option on publish [\#2225](https://github.com/pypeclub/OpenPype/pull/2225) +- Modules: Module can add cli commands [\#2224](https://github.com/pypeclub/OpenPype/pull/2224) +- Webpublisher: Separate webpublisher logic [\#2222](https://github.com/pypeclub/OpenPype/pull/2222) +- Add both side availability on Site Sync sites to Loader [\#2220](https://github.com/pypeclub/OpenPype/pull/2220) +- Tools: Center loader and library loader on show [\#2219](https://github.com/pypeclub/OpenPype/pull/2219) +- Maya : Validate shape zero [\#2212](https://github.com/pypeclub/OpenPype/pull/2212) +- Maya : validate unique names [\#2211](https://github.com/pypeclub/OpenPype/pull/2211) +- Tools: OpenPype stylesheet in workfiles tool [\#2208](https://github.com/pypeclub/OpenPype/pull/2208) +- Ftrack: Replace Queue with deque in event handlers logic [\#2204](https://github.com/pypeclub/OpenPype/pull/2204) +- Tools: New select context dialog [\#2200](https://github.com/pypeclub/OpenPype/pull/2200) +- Maya : Validate mesh ngons [\#2199](https://github.com/pypeclub/OpenPype/pull/2199) +- Dirmap in Nuke [\#2198](https://github.com/pypeclub/OpenPype/pull/2198) +- Delivery: Check 'frame' key in template for sequence delivery [\#2196](https://github.com/pypeclub/OpenPype/pull/2196) +- Settings: Site sync project settings improvement [\#2193](https://github.com/pypeclub/OpenPype/pull/2193) +- Usage of tools code [\#2185](https://github.com/pypeclub/OpenPype/pull/2185) +- Settings: Dictionary based on project roots [\#2184](https://github.com/pypeclub/OpenPype/pull/2184) +- Subset name: Be able to pass asset document to get subset 
name [\#2179](https://github.com/pypeclub/OpenPype/pull/2179) +- Tools: Experimental tools [\#2167](https://github.com/pypeclub/OpenPype/pull/2167) +- Loader: Refactor and use OpenPype stylesheets [\#2166](https://github.com/pypeclub/OpenPype/pull/2166) +- Add loader for linked smart objects in photoshop [\#2149](https://github.com/pypeclub/OpenPype/pull/2149) +- Burnins: DNxHD profiles handling [\#2142](https://github.com/pypeclub/OpenPype/pull/2142) +- Tools: Single access point for host tools [\#2139](https://github.com/pypeclub/OpenPype/pull/2139) + +**🐛 Bug fixes** + +- Ftrack: Sync project ftrack id cache issue [\#2250](https://github.com/pypeclub/OpenPype/pull/2250) +- Ftrack: Session creation and Prepare project [\#2245](https://github.com/pypeclub/OpenPype/pull/2245) +- Added queue for studio processing in PS [\#2237](https://github.com/pypeclub/OpenPype/pull/2237) +- Python 2: Unicode to string conversion [\#2236](https://github.com/pypeclub/OpenPype/pull/2236) +- Fix - enum for color coding in PS [\#2234](https://github.com/pypeclub/OpenPype/pull/2234) +- Pyblish Tool: Fix targets handling [\#2232](https://github.com/pypeclub/OpenPype/pull/2232) +- Ftrack: Base event fix of 'get\_project\_from\_entity' method [\#2214](https://github.com/pypeclub/OpenPype/pull/2214) +- Maya : multiple subsets review broken [\#2210](https://github.com/pypeclub/OpenPype/pull/2210) +- Fix - different command used for Linux and Mac OS [\#2207](https://github.com/pypeclub/OpenPype/pull/2207) +- Tools: Workfiles tool don't use avalon widgets [\#2205](https://github.com/pypeclub/OpenPype/pull/2205) +- Ftrack: Fill missing ftrack id on mongo project [\#2203](https://github.com/pypeclub/OpenPype/pull/2203) +- Project Manager: Fix copying of tasks [\#2191](https://github.com/pypeclub/OpenPype/pull/2191) +- StandalonePublisher: Source validator don't expect representations [\#2190](https://github.com/pypeclub/OpenPype/pull/2190) +- Blender: Fix trying to pack an image when the shader node has no texture [\#2183](https://github.com/pypeclub/OpenPype/pull/2183) +- Maya: review viewport settings [\#2177](https://github.com/pypeclub/OpenPype/pull/2177) +- MacOS: Launching of applications may cause Permissions error [\#2175](https://github.com/pypeclub/OpenPype/pull/2175) +- Maya: Aspect ratio [\#2174](https://github.com/pypeclub/OpenPype/pull/2174) +- Blender: Fix 'Deselect All' with object not in 'Object Mode' [\#2163](https://github.com/pypeclub/OpenPype/pull/2163) +- Tools: Stylesheets are applied after tool show [\#2161](https://github.com/pypeclub/OpenPype/pull/2161) +- Maya: Collect render - fix UNC path support 🐛 [\#2158](https://github.com/pypeclub/OpenPype/pull/2158) +- Maya: Fix hotbox broken by scriptsmenu [\#2151](https://github.com/pypeclub/OpenPype/pull/2151) +- Ftrack: Ignore save warnings exception in Prepare project action [\#2150](https://github.com/pypeclub/OpenPype/pull/2150) +- Loader thumbnails with smooth edges [\#2147](https://github.com/pypeclub/OpenPype/pull/2147) +- Added validator for source files for Standalone Publisher [\#2138](https://github.com/pypeclub/OpenPype/pull/2138) + +**Merged pull requests:** + +- Bump pillow from 8.2.0 to 8.3.2 [\#2162](https://github.com/pypeclub/OpenPype/pull/2162) +- Bump axios from 0.21.1 to 0.21.4 in /website [\#2059](https://github.com/pypeclub/OpenPype/pull/2059) ## [3.5.0](https://github.com/pypeclub/OpenPype/tree/3.5.0) (2021-10-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.5.0-nightly.8...3.5.0) +[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.1...3.5.0) + +**Deprecated:** + +- Maya: Change mayaAscii family to mayaScene [\#2106](https://github.com/pypeclub/OpenPype/pull/2106) + +**🆕 New features** + +- Added project and task into context change message in Maya [\#2131](https://github.com/pypeclub/OpenPype/pull/2131) +- Add ExtractBurnin to photoshop review [\#2124](https://github.com/pypeclub/OpenPype/pull/2124) +- PYPE-1218 - changed namespace to contain subset name in Maya [\#2114](https://github.com/pypeclub/OpenPype/pull/2114) +- Added running configurable disk mapping command before start of OP [\#2091](https://github.com/pypeclub/OpenPype/pull/2091) +- SFTP provider [\#2073](https://github.com/pypeclub/OpenPype/pull/2073) +- Maya: Validate setdress top group [\#2068](https://github.com/pypeclub/OpenPype/pull/2068) +- Maya: Enable publishing render attrib sets \(e.g. V-Ray Displacement\) with model [\#1955](https://github.com/pypeclub/OpenPype/pull/1955) + +**🚀 Enhancements** + +- Maya: make rig validators configurable in settings [\#2137](https://github.com/pypeclub/OpenPype/pull/2137) +- Settings: Updated readme for entity types in settings [\#2132](https://github.com/pypeclub/OpenPype/pull/2132) +- Nuke: unified clip loader [\#2128](https://github.com/pypeclub/OpenPype/pull/2128) +- Settings UI: Project model refreshing and sorting [\#2104](https://github.com/pypeclub/OpenPype/pull/2104) +- Create Read From Rendered - Disable Relative paths by default [\#2093](https://github.com/pypeclub/OpenPype/pull/2093) +- Added choosing different dirmap mapping if workfile synched locally [\#2088](https://github.com/pypeclub/OpenPype/pull/2088) +- General: Remove IdleManager module [\#2084](https://github.com/pypeclub/OpenPype/pull/2084) +- Tray UI: Message box about missing settings defaults [\#2080](https://github.com/pypeclub/OpenPype/pull/2080) +- Tray UI: Show menu where first click happened [\#2079](https://github.com/pypeclub/OpenPype/pull/2079) +- Global: add global validators to settings [\#2078](https://github.com/pypeclub/OpenPype/pull/2078) +- Use CRF for burnin when available [\#2070](https://github.com/pypeclub/OpenPype/pull/2070) +- Project manager: Filter first item after selection of project [\#2069](https://github.com/pypeclub/OpenPype/pull/2069) +- Nuke: Adding `still` image family workflow [\#2064](https://github.com/pypeclub/OpenPype/pull/2064) +- Maya: validate authorized loaded plugins [\#2062](https://github.com/pypeclub/OpenPype/pull/2062) +- Tools: add support for pyenv on windows [\#2051](https://github.com/pypeclub/OpenPype/pull/2051) +- SyncServer: Dropbox Provider [\#1979](https://github.com/pypeclub/OpenPype/pull/1979) +- Burnin: Get data from context with defined keys. [\#1897](https://github.com/pypeclub/OpenPype/pull/1897) +- Timers manager: Get task time [\#1896](https://github.com/pypeclub/OpenPype/pull/1896) +- TVPaint: Option to stop timer on application exit. [\#1887](https://github.com/pypeclub/OpenPype/pull/1887) + +**🐛 Bug fixes** + +- Maya: fix model publishing [\#2130](https://github.com/pypeclub/OpenPype/pull/2130) +- Fix - oiiotool wasn't recognized even if present [\#2129](https://github.com/pypeclub/OpenPype/pull/2129) +- General: Disk mapping group [\#2120](https://github.com/pypeclub/OpenPype/pull/2120) +- Hiero: publishing effect first time makes wrong resources path [\#2115](https://github.com/pypeclub/OpenPype/pull/2115) +- Add startup script for Houdini Core. 
[\#2110](https://github.com/pypeclub/OpenPype/pull/2110) +- TVPaint: Behavior name of loop also accept repeat [\#2109](https://github.com/pypeclub/OpenPype/pull/2109) +- Ftrack: Project settings save custom attributes skip unknown attributes [\#2103](https://github.com/pypeclub/OpenPype/pull/2103) +- Blender: Fix NoneType error when animation\_data is missing for a rig [\#2101](https://github.com/pypeclub/OpenPype/pull/2101) +- Fix broken import in sftp provider [\#2100](https://github.com/pypeclub/OpenPype/pull/2100) +- Global: Fix docstring on publish plugin extract review [\#2097](https://github.com/pypeclub/OpenPype/pull/2097) +- Delivery Action Files Sequence fix [\#2096](https://github.com/pypeclub/OpenPype/pull/2096) +- General: Cloud mongo ca certificate issue [\#2095](https://github.com/pypeclub/OpenPype/pull/2095) +- TVPaint: Creator use context from workfile [\#2087](https://github.com/pypeclub/OpenPype/pull/2087) +- Blender: fix texture missing when publishing blend files [\#2085](https://github.com/pypeclub/OpenPype/pull/2085) +- General: Startup validations oiio tool path fix on linux [\#2083](https://github.com/pypeclub/OpenPype/pull/2083) +- Deadline: Collect deadline server does not check existence of deadline key [\#2082](https://github.com/pypeclub/OpenPype/pull/2082) +- Blender: fixed Curves with modifiers in Rigs [\#2081](https://github.com/pypeclub/OpenPype/pull/2081) +- Nuke UI scaling [\#2077](https://github.com/pypeclub/OpenPype/pull/2077) +- Maya: Fix multi-camera renders [\#2065](https://github.com/pypeclub/OpenPype/pull/2065) +- Fix Sync Queue when project disabled [\#2063](https://github.com/pypeclub/OpenPype/pull/2063) + +**Merged pull requests:** + +- Bump pywin32 from 300 to 301 [\#2086](https://github.com/pypeclub/OpenPype/pull/2086) ## [3.4.1](https://github.com/pypeclub/OpenPype/tree/3.4.1) (2021-09-23) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.1-nightly.1...3.4.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.0...3.4.1) + +**🆕 New features** + +- Settings: Flag project as deactivated and hide from tools' view [\#2008](https://github.com/pypeclub/OpenPype/pull/2008) + +**🚀 Enhancements** + +- General: Startup validations [\#2054](https://github.com/pypeclub/OpenPype/pull/2054) +- Nuke: proxy mode validator [\#2052](https://github.com/pypeclub/OpenPype/pull/2052) +- Ftrack: Removed ftrack interface [\#2049](https://github.com/pypeclub/OpenPype/pull/2049) +- Settings UI: Deferred set value on entity [\#2044](https://github.com/pypeclub/OpenPype/pull/2044) +- Loader: Families filtering [\#2043](https://github.com/pypeclub/OpenPype/pull/2043) +- Settings UI: Project view enhancements [\#2042](https://github.com/pypeclub/OpenPype/pull/2042) +- Settings for Nuke IncrementScriptVersion [\#2039](https://github.com/pypeclub/OpenPype/pull/2039) +- Loader & Library loader: Use tools from OpenPype [\#2038](https://github.com/pypeclub/OpenPype/pull/2038) +- Adding predefined project folders creation in PM [\#2030](https://github.com/pypeclub/OpenPype/pull/2030) +- WebserverModule: Removed interface of webserver module [\#2028](https://github.com/pypeclub/OpenPype/pull/2028) +- TimersManager: Removed interface of timers manager [\#2024](https://github.com/pypeclub/OpenPype/pull/2024) +- Feature Maya import asset from scene inventory [\#2018](https://github.com/pypeclub/OpenPype/pull/2018) + +**🐛 Bug fixes** + +- Timers manager: Typo fix [\#2058](https://github.com/pypeclub/OpenPype/pull/2058) +- Hiero: Editorial fixes
[\#2057](https://github.com/pypeclub/OpenPype/pull/2057) +- Differentiate jpg sequences from thumbnail [\#2056](https://github.com/pypeclub/OpenPype/pull/2056) +- FFmpeg: Split command to list does not work [\#2046](https://github.com/pypeclub/OpenPype/pull/2046) +- Removed shell flag in subprocess call [\#2045](https://github.com/pypeclub/OpenPype/pull/2045) + +**Merged pull requests:** + +- Bump prismjs from 1.24.0 to 1.25.0 in /website [\#2050](https://github.com/pypeclub/OpenPype/pull/2050) ## [3.4.0](https://github.com/pypeclub/OpenPype/tree/3.4.0) (2021-09-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.0-nightly.6...3.4.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...3.4.0) + +### 📖 Documentation + +- Documentation: Ftrack launch arguments update [\#2014](https://github.com/pypeclub/OpenPype/pull/2014) +- Nuke Quick Start / Tutorial [\#1952](https://github.com/pypeclub/OpenPype/pull/1952) +- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) + +**🆕 New features** + +- Nuke: Compatibility with Nuke 13 [\#2003](https://github.com/pypeclub/OpenPype/pull/2003) +- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) +- Blender: Improved assets handling [\#1615](https://github.com/pypeclub/OpenPype/pull/1615) + +**🚀 Enhancements** + +- Added possibility to configure of synchronization of workfile version… [\#2041](https://github.com/pypeclub/OpenPype/pull/2041) +- General: Task types in profiles [\#2036](https://github.com/pypeclub/OpenPype/pull/2036) +- Console interpreter: Handle invalid sizes on initialization [\#2022](https://github.com/pypeclub/OpenPype/pull/2022) +- Ftrack: Show OpenPype versions in event server status [\#2019](https://github.com/pypeclub/OpenPype/pull/2019) +- General: Staging icon [\#2017](https://github.com/pypeclub/OpenPype/pull/2017) +- Ftrack: Sync to avalon actions have jobs [\#2015](https://github.com/pypeclub/OpenPype/pull/2015) +- Modules: Connect method is not required [\#2009](https://github.com/pypeclub/OpenPype/pull/2009) +- Settings UI: Number with configurable steps [\#2001](https://github.com/pypeclub/OpenPype/pull/2001) +- Moving project folder structure creation out of ftrack module \#1989 [\#1996](https://github.com/pypeclub/OpenPype/pull/1996) +- Configurable items for providers without Settings [\#1987](https://github.com/pypeclub/OpenPype/pull/1987) +- Global: Example addons [\#1986](https://github.com/pypeclub/OpenPype/pull/1986) +- Standalone Publisher: Extract harmony zip handle workfile template [\#1982](https://github.com/pypeclub/OpenPype/pull/1982) +- Settings UI: Number sliders [\#1978](https://github.com/pypeclub/OpenPype/pull/1978) +- Workfiles: Support more workfile templates [\#1966](https://github.com/pypeclub/OpenPype/pull/1966) +- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) +- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) +- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962) +- Global: Settings defined by Addons/Modules [\#1959](https://github.com/pypeclub/OpenPype/pull/1959) +- CI: change release numbering triggers [\#1954](https://github.com/pypeclub/OpenPype/pull/1954) +- Global: Avalon Host name
collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949) +- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948) +- Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) +- OpenPype: Add version validation and `--headless` mode and update progress 🔄 [\#1939](https://github.com/pypeclub/OpenPype/pull/1939) +- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888) +- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) + +**🐛 Bug fixes** + +- Workfiles tool: Task selection [\#2040](https://github.com/pypeclub/OpenPype/pull/2040) +- Ftrack: Delete old versions missing settings key [\#2037](https://github.com/pypeclub/OpenPype/pull/2037) +- Nuke: typo on a button [\#2034](https://github.com/pypeclub/OpenPype/pull/2034) +- Hiero: Fix \"none\" named tags [\#2033](https://github.com/pypeclub/OpenPype/pull/2033) +- FFmpeg: Subprocess arguments as list [\#2032](https://github.com/pypeclub/OpenPype/pull/2032) +- General: Fix Python 2 breaking line [\#2016](https://github.com/pypeclub/OpenPype/pull/2016) +- Bugfix/webpublisher task type [\#2006](https://github.com/pypeclub/OpenPype/pull/2006) +- Nuke thumbnails generated from middle of the sequence [\#1992](https://github.com/pypeclub/OpenPype/pull/1992) +- Nuke: last version from path gets correct version [\#1990](https://github.com/pypeclub/OpenPype/pull/1990) +- nuke, resolve, hiero: precollector order less than 0.5 [\#1984](https://github.com/pypeclub/OpenPype/pull/1984) +- Last workfile with multiple work templates [\#1981](https://github.com/pypeclub/OpenPype/pull/1981) +- Collectors order [\#1977](https://github.com/pypeclub/OpenPype/pull/1977) +- Stop timer was within validator order range.
[\#1975](https://github.com/pypeclub/OpenPype/pull/1975) +- Ftrack: arrow submodule has https url source [\#1974](https://github.com/pypeclub/OpenPype/pull/1974) +- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972) +- Deadline: Houdini plugins in different hierarchy [\#1970](https://github.com/pypeclub/OpenPype/pull/1970) +- Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967) +- Global: ExtractJpeg can handle filepaths with spaces [\#1961](https://github.com/pypeclub/OpenPype/pull/1961) +- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960) + +**Merged pull requests:** + +- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958) +- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) ## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...3.3.1) + +**🐛 Bug fixes** + +- TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946) +- Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945) +- standalone: editorial shared object problem [\#1941](https://github.com/pypeclub/OpenPype/pull/1941) +- Bugfix nuke deadline app name [\#1928](https://github.com/pypeclub/OpenPype/pull/1928) ## [3.3.0](https://github.com/pypeclub/OpenPype/tree/3.3.0) (2021-08-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.0-nightly.11...3.3.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.2.0...3.3.0) + +### 📖 Documentation + +- Standalone Publish of textures family [\#1834](https://github.com/pypeclub/OpenPype/pull/1834) + +**🆕 New features** + +- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923) +- Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901) + +**🚀 Enhancements** + +- Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940) +- Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927) +- Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925) +- Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920) +- Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919) +- submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900) +- Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899) +- Support nested studio plugins paths. [\#1898](https://github.com/pypeclub/OpenPype/pull/1898) +- Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892) +- Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891) +- Expose stop timer through rest api. 
[\#1886](https://github.com/pypeclub/OpenPype/pull/1886) +- TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) +- Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882) +- Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) +- Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) +- Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) +- Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) +- Anatomy schema validation [\#1864](https://github.com/pypeclub/OpenPype/pull/1864) +- Ftrack prepare project structure [\#1861](https://github.com/pypeclub/OpenPype/pull/1861) +- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Independent general environments [\#1853](https://github.com/pypeclub/OpenPype/pull/1853) +- TVPaint Start Frame [\#1844](https://github.com/pypeclub/OpenPype/pull/1844) +- Ftrack push attributes action adds traceback to job [\#1843](https://github.com/pypeclub/OpenPype/pull/1843) +- Prepare project action enhance [\#1838](https://github.com/pypeclub/OpenPype/pull/1838) +- nuke: settings create missing default subsets [\#1829](https://github.com/pypeclub/OpenPype/pull/1829) +- Update poetry lock [\#1823](https://github.com/pypeclub/OpenPype/pull/1823) +- Settings: settings for plugins [\#1819](https://github.com/pypeclub/OpenPype/pull/1819) +- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) +- Maya: Deadline custom settings [\#1797](https://github.com/pypeclub/OpenPype/pull/1797) +- Maya: Shader name validation [\#1762](https://github.com/pypeclub/OpenPype/pull/1762) + +**🐛 Bug fixes** + +- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) +- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) +- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) +- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926) +- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) +- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) +- Nuke: update video file crashing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) +- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) +- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) +- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) +- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) +- Burnins: Use input's bitrate in h264 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) +- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) +- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) +- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) +- Normalize path
returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880) +- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) +- imageio: fix grouping [\#1856](https://github.com/pypeclub/OpenPype/pull/1856) +- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) +- publisher: missing version in subset prop [\#1849](https://github.com/pypeclub/OpenPype/pull/1849) +- Ftrack type error fix in sync to avalon event handler [\#1845](https://github.com/pypeclub/OpenPype/pull/1845) +- Nuke: updating effects subset fail [\#1841](https://github.com/pypeclub/OpenPype/pull/1841) +- nuke: write render node skipped with crop [\#1836](https://github.com/pypeclub/OpenPype/pull/1836) +- Project folder structure overrides [\#1813](https://github.com/pypeclub/OpenPype/pull/1813) +- Maya: fix yeti settings path in extractor [\#1809](https://github.com/pypeclub/OpenPype/pull/1809) +- Failsafe for cross project containers. [\#1806](https://github.com/pypeclub/OpenPype/pull/1806) +- Houdini collector formatting keys fix [\#1802](https://github.com/pypeclub/OpenPype/pull/1802) +- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) +- Application launch stdout/stderr in GUI build [\#1684](https://github.com/pypeclub/OpenPype/pull/1684) +- Nuke: re-use instance nodes output path [\#1577](https://github.com/pypeclub/OpenPype/pull/1577) + +**Merged pull requests:** + +- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) +- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) +- Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) +- Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) +- PS, AE - send actual context when another webserver is running [\#1811](https://github.com/pypeclub/OpenPype/pull/1811) ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.4...3.2.0) + +### 📖 Documentation + +- Fix: staging and `--use-version` option [\#1786](https://github.com/pypeclub/OpenPype/pull/1786) +- Subset template and TVPaint subset template docs [\#1717](https://github.com/pypeclub/OpenPype/pull/1717) +- Overscan color extract review [\#1701](https://github.com/pypeclub/OpenPype/pull/1701) + +**🚀 Enhancements** + +- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) +- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) +- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) +- Settings conditional dict [\#1777](https://github.com/pypeclub/OpenPype/pull/1777) +- Settings application use python 2 only where needed [\#1776](https://github.com/pypeclub/OpenPype/pull/1776) +- Settings UI copy/paste [\#1769](https://github.com/pypeclub/OpenPype/pull/1769) +- Workfile tool widths [\#1766](https://github.com/pypeclub/OpenPype/pull/1766) +- Push hierarchical attributes care about task parent changes [\#1763](https://github.com/pypeclub/OpenPype/pull/1763) +- Application executables with environment variables
[\#1757](https://github.com/pypeclub/OpenPype/pull/1757) +- Deadline: Nuke submission additional attributes [\#1756](https://github.com/pypeclub/OpenPype/pull/1756) +- Settings schema without prefill [\#1753](https://github.com/pypeclub/OpenPype/pull/1753) +- Settings Hosts enum [\#1739](https://github.com/pypeclub/OpenPype/pull/1739) +- Validate containers settings [\#1736](https://github.com/pypeclub/OpenPype/pull/1736) +- PS - added loader from sequence [\#1726](https://github.com/pypeclub/OpenPype/pull/1726) +- Autoupdate launcher [\#1725](https://github.com/pypeclub/OpenPype/pull/1725) +- Toggle Ftrack upload in StandalonePublisher [\#1708](https://github.com/pypeclub/OpenPype/pull/1708) +- Nuke: Prerender Frame Range by default [\#1699](https://github.com/pypeclub/OpenPype/pull/1699) +- Smoother edges of color triangle [\#1695](https://github.com/pypeclub/OpenPype/pull/1695) + +**🐛 Bug fixes** + +- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) +- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) +- Invitee email can be None which break the Ftrack commit. [\#1788](https://github.com/pypeclub/OpenPype/pull/1788) +- Otio unrelated error on import [\#1782](https://github.com/pypeclub/OpenPype/pull/1782) +- FFprobe streams order [\#1775](https://github.com/pypeclub/OpenPype/pull/1775) +- Fix - single file files are str only, cast it to list to count properly [\#1772](https://github.com/pypeclub/OpenPype/pull/1772) +- Environments in app executable for MacOS [\#1768](https://github.com/pypeclub/OpenPype/pull/1768) +- Project specific environments [\#1767](https://github.com/pypeclub/OpenPype/pull/1767) +- Settings UI with refresh button [\#1764](https://github.com/pypeclub/OpenPype/pull/1764) +- Standalone publisher thumbnail extractor fix [\#1761](https://github.com/pypeclub/OpenPype/pull/1761) +- Anatomy others templates don't cause crash [\#1758](https://github.com/pypeclub/OpenPype/pull/1758) +- Backend acre module commit update [\#1745](https://github.com/pypeclub/OpenPype/pull/1745) +- hiero: precollect instances failing when audio selected [\#1743](https://github.com/pypeclub/OpenPype/pull/1743) +- Hiero: creator instance error [\#1742](https://github.com/pypeclub/OpenPype/pull/1742) +- Nuke: fixing render creator for no selection format failing [\#1741](https://github.com/pypeclub/OpenPype/pull/1741) +- StandalonePublisher: failing collector for editorial [\#1738](https://github.com/pypeclub/OpenPype/pull/1738) +- Local settings UI crash on missing defaults [\#1737](https://github.com/pypeclub/OpenPype/pull/1737) +- TVPaint white background on thumbnail [\#1735](https://github.com/pypeclub/OpenPype/pull/1735) +- Ftrack missing custom attribute message [\#1734](https://github.com/pypeclub/OpenPype/pull/1734) +- Launcher project changes [\#1733](https://github.com/pypeclub/OpenPype/pull/1733) +- Ftrack sync status [\#1732](https://github.com/pypeclub/OpenPype/pull/1732) +- TVPaint use layer name for default variant [\#1724](https://github.com/pypeclub/OpenPype/pull/1724) +- Default subset template for TVPaint review and workfile families [\#1716](https://github.com/pypeclub/OpenPype/pull/1716) +- Maya: Extract review hotfix [\#1714](https://github.com/pypeclub/OpenPype/pull/1714) +- Settings: Imageio improving granularity [\#1711](https://github.com/pypeclub/OpenPype/pull/1711) +- Application without executables [\#1679](https://github.com/pypeclub/OpenPype/pull/1679) +- 
Unreal: launching on Linux [\#1672](https://github.com/pypeclub/OpenPype/pull/1672) + +**Merged pull requests:** + +- Bump prismjs from 1.23.0 to 1.24.0 in /website [\#1773](https://github.com/pypeclub/OpenPype/pull/1773) +- TVPaint ftrack family [\#1755](https://github.com/pypeclub/OpenPype/pull/1755) ## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) @@ -260,7 +1864,7 @@ ## [2.18.3](https://github.com/pypeclub/OpenPype/tree/2.18.3) (2021-06-23) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.2...2.18.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.2...2.18.3) ## [2.18.2](https://github.com/pypeclub/OpenPype/tree/2.18.2) (2021-06-16) @@ -268,9 +1872,47 @@ ## [3.1.0](https://github.com/pypeclub/OpenPype/tree/3.1.0) (2021-06-15) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.1.0-nightly.4...3.1.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.0.0...3.1.0) -# Changelog +### 📖 Documentation + +- Feature Slack integration [\#1657](https://github.com/pypeclub/OpenPype/pull/1657) + +**🚀 Enhancements** + +- Log Viewer with OpenPype style [\#1703](https://github.com/pypeclub/OpenPype/pull/1703) +- Scrolling in OpenPype info widget [\#1702](https://github.com/pypeclub/OpenPype/pull/1702) +- OpenPype style in modules [\#1694](https://github.com/pypeclub/OpenPype/pull/1694) +- Sort applications and tools alphabetically in Settings UI [\#1689](https://github.com/pypeclub/OpenPype/pull/1689) +- \#683 - Validate Frame Range in Standalone Publisher [\#1683](https://github.com/pypeclub/OpenPype/pull/1683) +- Hiero: old container versions identify with red color [\#1682](https://github.com/pypeclub/OpenPype/pull/1682) +- Project Manager: Default name column width [\#1669](https://github.com/pypeclub/OpenPype/pull/1669) +- Remove outline in stylesheet [\#1667](https://github.com/pypeclub/OpenPype/pull/1667) +- TVPaint: Creator take layer name as default value for subset variant [\#1663](https://github.com/pypeclub/OpenPype/pull/1663) +- TVPaint custom subset template [\#1662](https://github.com/pypeclub/OpenPype/pull/1662) +- Editorial: conform assets validator [\#1659](https://github.com/pypeclub/OpenPype/pull/1659) +- Nuke - Publish simplification [\#1653](https://github.com/pypeclub/OpenPype/pull/1653) +- \#1333 - added tooltip hints to Pyblish buttons [\#1649](https://github.com/pypeclub/OpenPype/pull/1649) + +**🐛 Bug fixes** + +- Nuke: broken publishing rendered frames [\#1707](https://github.com/pypeclub/OpenPype/pull/1707) +- Standalone publisher Thumbnail export args [\#1705](https://github.com/pypeclub/OpenPype/pull/1705) +- Bad zip can break OpenPype start [\#1691](https://github.com/pypeclub/OpenPype/pull/1691) +- Hiero: published whole edit mov [\#1687](https://github.com/pypeclub/OpenPype/pull/1687) +- Ftrack subprocess handle of stdout/stderr [\#1675](https://github.com/pypeclub/OpenPype/pull/1675) +- Settings list race condition and mutable dict list conversion [\#1671](https://github.com/pypeclub/OpenPype/pull/1671) +- Mac launch arguments fix [\#1660](https://github.com/pypeclub/OpenPype/pull/1660) +- Fix missing dbm python module [\#1652](https://github.com/pypeclub/OpenPype/pull/1652) +- Transparent branches in view on Mac [\#1648](https://github.com/pypeclub/OpenPype/pull/1648) +- Add asset on task item [\#1646](https://github.com/pypeclub/OpenPype/pull/1646) +- Project manager save and queue [\#1645](https://github.com/pypeclub/OpenPype/pull/1645) +- New project anatomy
values [\#1644](https://github.com/pypeclub/OpenPype/pull/1644) +- Farm publishing: check if published items do exist [\#1573](https://github.com/pypeclub/OpenPype/pull/1573) + +**Merged pull requests:** + +- Bump normalize-url from 4.5.0 to 4.5.1 in /website [\#1686](https://github.com/pypeclub/OpenPype/pull/1686) ## [3.0.0](https://github.com/pypeclub/openpype/tree/3.0.0) @@ -283,12 +1925,12 @@ - Easy to add Application versions. - Per Project Environment and plugin management. - Robust profile system for creating reviewables and burnins, with filtering based on Application, Task and data family. -- Configurable publish plugins. +- Configurable publish plugins. - Options to make any validator or extractor, optional or disabled. - Color Management is now unified under anatomy settings. - Subset naming and grouping is fully configurable. - All project attributes can now be set directly in OpenPype settings. -- Studio Setting can be locked to prevent unwanted artist changes. +- Studio Setting can be locked to prevent unwanted artist changes. - You can now add per project and per task type templates for workfile initialization in most hosts. - Too many other individual configurable options to list in this changelog :) @@ -1046,8 +2688,6 @@ - Standalone Publisher: getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) -# Changelog - ## [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) (2020-11-15) [Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6) @@ -1837,9 +3477,4 @@ A large cleanup release. Most of the changes are under the hood. - _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner - - - - - - +\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* diff --git a/HISTORY.md b/HISTORY.md index 032f876aa3..ca54c60273 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,1811 @@ # Changelog +## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) + +### 📖 Documentation + +- Documentation: Anatomy templates [\#3618](https://github.com/pypeclub/OpenPype/pull/3618) + +**🆕 New features** + +- Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) + +**🚀 Enhancements** + +- Flame: Adding Creator's retimed shot and handles switch [\#3826](https://github.com/pypeclub/OpenPype/pull/3826) +- Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) +- General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) +- Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) +- SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) +- Kitsu: Drop 'entities root' setting.
[\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) +- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) +- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) +- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) +- Scene Inventory: Add subsetGroup column [\#3658](https://github.com/pypeclub/OpenPype/pull/3658) + +**🐛 Bug fixes** + +- General: Fix Pattern access in client code [\#3828](https://github.com/pypeclub/OpenPype/pull/3828) +- Launcher: Skip opening last work file works for groups [\#3822](https://github.com/pypeclub/OpenPype/pull/3822) +- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) +- Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) +- Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) +- nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) +- Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) +- Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) +- Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) +- Maya: `containerise` don't skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) + +**🔀 Refactored code** + +- Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) +- Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) +- AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) +- General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) +- General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) +- General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) +- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) +- General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) +- General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) + +**Merged pull requests:** + +- Standalone Publisher: Ignore empty labels, then still use name like other asset
models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) +- Kitsu - sync\_all\_project - add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) + +## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1) + +### 📖 Documentation + +- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) +- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) + +**🆕 New features** + +- Webpublisher: change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) +- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) + +**🚀 Enhancements** + +- General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) +- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) +- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) +- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) +- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) +- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) +- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) + +**🐛 Bug fixes** + +- Maya: Fix typo in getPanel argument `with_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) +- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) +- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) +- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) +- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) +- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) +- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) +- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) +- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) +- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) +- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) + +**🔀 Refactored code** + +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined
photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) +- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) +- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) +- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) +- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) +- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) +- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) +- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) +- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) +- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) +- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) +- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) +- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) + +**Merged pull requests:** + +- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) +- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) +- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) +- Nuke: Validation refactor to new publisher [\#3567](https://github.com/pypeclub/OpenPype/pull/3567) + +## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...3.14.0) + +**🆕 New features** + +- Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578) +- Maya: Implementation of JSON layout for Unreal workflow [\#3353](https://github.com/pypeclub/OpenPype/pull/3353) +- Maya: Build workfile by template [\#3315](https://github.com/pypeclub/OpenPype/pull/3315) + +**🚀 Enhancements** + +- Ftrack: Additional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) +- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) +- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) +- Maya: Disable rendering of all lights for render instances submitted through Deadline.
[\#3661](https://github.com/pypeclub/OpenPype/pull/3661) +- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) + +**🐛 Bug fixes** + +- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) +- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) +- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) +- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) +- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) +- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) +- General: Workfiles builder using query functions [\#3598](https://github.com/pypeclub/OpenPype/pull/3598) + +**Merged pull requests:** + +- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) +- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628) + +## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) + +**🆕 New features** + +- Support for multiple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) +- Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) + +**🚀 Enhancements** + +- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) +- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) +- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) +- General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) +- Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) +- Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) +- General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) +- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) +- General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) +- Maya: Render Creator has configurable options.
[\#3097](https://github.com/pypeclub/OpenPype/pull/3097) + +**🐛 Bug fixes** + +- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) +- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) +- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) +- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) +- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) +- AfterEffects: refactored integrate doesn't work for multi frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) +- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) +- TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) +- Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. [\#3590](https://github.com/pypeclub/OpenPype/pull/3590) +- Fix general settings environment variables resolution [\#3587](https://github.com/pypeclub/OpenPype/pull/3587) +- Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) +- General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) +- Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) +- Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) + +**🔀 Refactored code** + +- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) +- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) +- General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) +- General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) + +**Merged pull requests:** + +- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) +- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) +- Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) + +## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...3.12.2) + +### 📖 Documentation + +- Update website with more studios [\#3554](https://github.com/pypeclub/OpenPype/pull/3554) +- Documentation: Update publishing dev docs [\#3549](https://github.com/pypeclub/OpenPype/pull/3549) + +**🚀 Enhancements** + +- General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) +- Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) +- General: Interactive console in cli
[\#3526](https://github.com/pypeclub/OpenPype/pull/3526) +- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) +- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) +- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) +- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) +- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) +- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) +- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) +- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) +- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) +- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) +- Enhance powershell build scripts [\#1827](https://github.com/pypeclub/OpenPype/pull/1827) + +**🐛 Bug fixes** + +- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) +- Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) +- NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) +- Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) +- General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) +- Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) +- Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) +- Workfiles tool: Show of tool and its flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) +- General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) +- Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525) +- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) +- Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521) +- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) +- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) +- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) +- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) +- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) + +**🔀 Refactored code** + +- General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) +- General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) +- Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) +- General: Client docstrings cleanup
[\#3529](https://github.com/pypeclub/OpenPype/pull/3529) +- General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) +- General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) +- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) +- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) +- Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) +- Refactor Integrate Asset [\#2898](https://github.com/pypeclub/OpenPype/pull/2898) + +**Merged pull requests:** + +- Maya: fix active pane loss [\#3566](https://github.com/pypeclub/OpenPype/pull/3566) + +## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...3.12.1) + +### 📖 Documentation + +- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) + +**🆕 New features** + +- Maya: Add VDB to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) + +**🚀 Enhancements** + +- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) +- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) +- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) +- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) +- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) +- Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) +- Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) +- Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) +- Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) +- Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) +- Maya: Add additional playblast options to review Extractor. [\#3384](https://github.com/pypeclub/OpenPype/pull/3384) +- Maya: Ability to set resolution for playblasts from asset, and override through review instance. 
[\#3360](https://github.com/pypeclub/OpenPype/pull/3360) +- Maya: Redshift Volume Loader Implement update, remove, switch + fix vdb sequence support [\#3197](https://github.com/pypeclub/OpenPype/pull/3197) +- Maya: Implement `iter_visible_nodes_in_range` for extracting Alembics [\#3100](https://github.com/pypeclub/OpenPype/pull/3100) + +**🐛 Bug fixes** + +- TrayPublisher: Keep using instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) +- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) +- General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) +- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) +- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) +- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) +- Kitsu: bugfix with sync-service and publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) +- Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) +- General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) +- Resolve: removed a few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) +- General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) +- Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) +- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) +- Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) +- LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) +- Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) +- Maya: Camera extra data - additional fix for \#3304 [\#3386](https://github.com/pypeclub/OpenPype/pull/3386) +- Maya: Handle excluding `model` family from frame range validator.
[\#3370](https://github.com/pypeclub/OpenPype/pull/3370) + +**🔀 Refactored code** + +- Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) +- Maya: Re-use `maintained_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) +- General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) +- Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) +- General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) +- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) +- General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) +- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) +- General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) +- General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) +- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) +- Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) +- General: Host implementation defined with class [\#3337](https://github.com/pypeclub/OpenPype/pull/3337) + +## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.1...3.12.0) + +### 📖 Documentation + +- Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) +- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) + +**🆕 New features** + +- Shotgrid: Add production beta of shotgrid integration [\#2921](https://github.com/pypeclub/OpenPype/pull/2921) + +**🚀 Enhancements** + +- Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) +- Attribute Defs UI: Files widget shows what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) +- General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) +- Hosts: More options for in-host callbacks [\#3357](https://github.com/pypeclub/OpenPype/pull/3357) +- Multiverse: expose some settings to GUI [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) +- Maya: Allow more data to be published along camera 🎥 [\#3304](https://github.com/pypeclub/OpenPype/pull/3304) +- Add root keys and project keys to create starting folder [\#2755](https://github.com/pypeclub/OpenPype/pull/2755) + +**🐛 Bug fixes** + +- NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) +- Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) +- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) +- Houdini: fix loading and updating vdb/bgeo sequences [\#3408](https://github.com/pypeclub/OpenPype/pull/3408) +- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) +- General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) +- Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) +- TVPaint: Make sure exit code
is set and not None [\#3382](https://github.com/pypeclub/OpenPype/pull/3382) +- Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) +- Flame: bunch of publishing issues [\#3377](https://github.com/pypeclub/OpenPype/pull/3377) +- Harmony: added UNC path to zipfile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) +- Standalone: settings improvements [\#3355](https://github.com/pypeclub/OpenPype/pull/3355) +- Nuke: Load full model hierarchy by default [\#3328](https://github.com/pypeclub/OpenPype/pull/3328) +- Nuke: multiple baking streams with correct slate [\#3245](https://github.com/pypeclub/OpenPype/pull/3245) +- Maya: fix image prefix warning in validator [\#3128](https://github.com/pypeclub/OpenPype/pull/3128) + +**🔀 Refactored code** + +- Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) +- General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) +- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) +- Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) +- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) +- Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) +- Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) +- Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) +- Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) +- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) +- AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) +- TVPaint: Use client query functions [\#3340](https://github.com/pypeclub/OpenPype/pull/3340) +- Ftrack: Use client query functions [\#3339](https://github.com/pypeclub/OpenPype/pull/3339) +- Standalone Publisher: Use client query functions [\#3330](https://github.com/pypeclub/OpenPype/pull/3330) + +**Merged pull requests:** + +- Sync Queue: Added far future value for null values for dates [\#3371](https://github.com/pypeclub/OpenPype/pull/3371) +- Maya - added support for single frame playblast review [\#3369](https://github.com/pypeclub/OpenPype/pull/3369) +- Houdini: Implement Redshift Proxy Export [\#3196](https://github.com/pypeclub/OpenPype/pull/3196) + +## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.0...3.11.1) + +**🆕 New features** + +- Flame: custom export temp folder [\#3346](https://github.com/pypeclub/OpenPype/pull/3346) +- Nuke: removing third-party plugins [\#3344](https://github.com/pypeclub/OpenPype/pull/3344) + +**🚀 Enhancements** + +- Pyblish Pype: Hiding/Close issues [\#3367](https://github.com/pypeclub/OpenPype/pull/3367) +- Ftrack: Removed requirement of pypeclub role from default settings [\#3354](https://github.com/pypeclub/OpenPype/pull/3354) +- Kitsu: Prevent crash on missing frames information [\#3352](https://github.com/pypeclub/OpenPype/pull/3352) +- Ftrack: Open browser from tray [\#3320](https://github.com/pypeclub/OpenPype/pull/3320) +- Enhancement: More control over thumbnail processing.
[\#3259](https://github.com/pypeclub/OpenPype/pull/3259) + +**🐛 Bug fixes** + +- Nuke: bake streams with slate on farm [\#3368](https://github.com/pypeclub/OpenPype/pull/3368) +- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) +- Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) +- Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) +- AE: fix validate\_scene\_settings and renderLocal [\#3358](https://github.com/pypeclub/OpenPype/pull/3358) +- deadline: fixing misidentification of reviewables [\#3356](https://github.com/pypeclub/OpenPype/pull/3356) +- General: Create only one thumbnail per instance [\#3351](https://github.com/pypeclub/OpenPype/pull/3351) +- nuke: adding extract thumbnail settings 3.10 [\#3347](https://github.com/pypeclub/OpenPype/pull/3347) +- General: Fix last version function [\#3345](https://github.com/pypeclub/OpenPype/pull/3345) +- Deadline: added OPENPYPE\_MONGO to filter [\#3336](https://github.com/pypeclub/OpenPype/pull/3336) +- Nuke: fixing farm publishing if review is disabled [\#3306](https://github.com/pypeclub/OpenPype/pull/3306) +- Maya: Fix Yeti errors on Create, Publish and Load [\#3198](https://github.com/pypeclub/OpenPype/pull/3198) + +**🔀 Refactored code** + +- Webpublisher: Use client query functions [\#3333](https://github.com/pypeclub/OpenPype/pull/3333) + +## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.10.0...3.11.0) + +### 📖 Documentation + +- Documentation: Add app key to template documentation [\#3299](https://github.com/pypeclub/OpenPype/pull/3299) +- doc: adding royal render and multiverse to the web site [\#3285](https://github.com/pypeclub/OpenPype/pull/3285) +- Module: Kitsu module [\#2650](https://github.com/pypeclub/OpenPype/pull/2650) + +**🆕 New features** + +- Multiverse: fixed composition write, full docs, cosmetics [\#3178](https://github.com/pypeclub/OpenPype/pull/3178) + +**🚀 Enhancements** + +- Settings: Settings can be extracted from UI [\#3323](https://github.com/pypeclub/OpenPype/pull/3323) +- updated poetry installation source [\#3316](https://github.com/pypeclub/OpenPype/pull/3316) +- Ftrack: Action to easily create daily review session [\#3310](https://github.com/pypeclub/OpenPype/pull/3310) +- TVPaint: Extractor use mark in/out range to render [\#3309](https://github.com/pypeclub/OpenPype/pull/3309) +- Ftrack: Delivery action can work on ReviewSessions [\#3307](https://github.com/pypeclub/OpenPype/pull/3307) +- Maya: Look assigner UI improvements [\#3298](https://github.com/pypeclub/OpenPype/pull/3298) +- Ftrack: Action to transfer values of hierarchical attributes [\#3284](https://github.com/pypeclub/OpenPype/pull/3284) +- Maya: better handling of legacy review subsets names [\#3269](https://github.com/pypeclub/OpenPype/pull/3269) +- General: Updated windows oiio tool [\#3268](https://github.com/pypeclub/OpenPype/pull/3268) +- Unreal: add support for skeletalMesh and staticMesh to loaders [\#3267](https://github.com/pypeclub/OpenPype/pull/3267) +- Maya: reference loaders could store placeholder in referenced url [\#3264](https://github.com/pypeclub/OpenPype/pull/3264) +- TVPaint: Init file for TVPaint worker also handles guideline images [\#3250](https://github.com/pypeclub/OpenPype/pull/3250) +- Nuke: Change default icon path in settings [\#3247](https://github.com/pypeclub/OpenPype/pull/3247) +-
Maya: publishing of animation and pointcache on a farm [\#3225](https://github.com/pypeclub/OpenPype/pull/3225) +- Maya: Look assigner UI improvements [\#3208](https://github.com/pypeclub/OpenPype/pull/3208) +- Nuke: add pointcache and animation to loader [\#3186](https://github.com/pypeclub/OpenPype/pull/3186) +- Nuke: Add a gizmo menu [\#3172](https://github.com/pypeclub/OpenPype/pull/3172) +- Support for Unreal 5 [\#3122](https://github.com/pypeclub/OpenPype/pull/3122) + +**🐛 Bug fixes** + +- General: Handle empty source key on instance [\#3342](https://github.com/pypeclub/OpenPype/pull/3342) +- Houdini: Fix Houdini VDB manage update wrong file attribute name [\#3322](https://github.com/pypeclub/OpenPype/pull/3322) +- Nuke: anatomy compatibility issue hacks [\#3321](https://github.com/pypeclub/OpenPype/pull/3321) +- hiero: otio p3 compatibility issue - metadata on effect use update 3.11 [\#3314](https://github.com/pypeclub/OpenPype/pull/3314) +- General: Vendorized modules for Python 2 and update poetry lock [\#3305](https://github.com/pypeclub/OpenPype/pull/3305) +- Fix - added local targets to install host [\#3303](https://github.com/pypeclub/OpenPype/pull/3303) +- Settings: Add missing default settings for nuke gizmo [\#3301](https://github.com/pypeclub/OpenPype/pull/3301) +- Maya: Fix swapped width and height in reviews [\#3300](https://github.com/pypeclub/OpenPype/pull/3300) +- Maya: point cache publish handles Maya instances [\#3297](https://github.com/pypeclub/OpenPype/pull/3297) +- Global: extract review slate issues [\#3286](https://github.com/pypeclub/OpenPype/pull/3286) +- Webpublisher: return only active projects in ProjectsEndpoint [\#3281](https://github.com/pypeclub/OpenPype/pull/3281) +- Hiero: add support for task tags 3.10.x [\#3279](https://github.com/pypeclub/OpenPype/pull/3279) +- General: Fix Oiio tool path resolving [\#3278](https://github.com/pypeclub/OpenPype/pull/3278) +- Maya: Fix udim support for e.g.
uppercase \<UDIM\> tag [\#3266](https://github.com/pypeclub/OpenPype/pull/3266) +- Nuke: bake reformat was failing on string type [\#3261](https://github.com/pypeclub/OpenPype/pull/3261) +- Maya: hotfix Pxr multitexture in looks [\#3260](https://github.com/pypeclub/OpenPype/pull/3260) +- Unreal: Fix Camera Loading if Layout is missing [\#3255](https://github.com/pypeclub/OpenPype/pull/3255) +- Unreal: Fixed Animation loading in UE5 [\#3240](https://github.com/pypeclub/OpenPype/pull/3240) +- Unreal: Fixed Render creation in UE5 [\#3239](https://github.com/pypeclub/OpenPype/pull/3239) +- Unreal: Fixed Camera loading in UE5 [\#3238](https://github.com/pypeclub/OpenPype/pull/3238) +- Flame: debugging [\#3224](https://github.com/pypeclub/OpenPype/pull/3224) +- add silent audio to slate [\#3162](https://github.com/pypeclub/OpenPype/pull/3162) +- Add timecode to slate [\#2929](https://github.com/pypeclub/OpenPype/pull/2929) + +**🔀 Refactored code** + +- Blender: Use client query functions [\#3331](https://github.com/pypeclub/OpenPype/pull/3331) +- General: Define query functions [\#3288](https://github.com/pypeclub/OpenPype/pull/3288) + +**Merged pull requests:** + +- Maya: add pointcache family to gpu cache loader [\#3318](https://github.com/pypeclub/OpenPype/pull/3318) +- Maya look: skip empty file attributes [\#3274](https://github.com/pypeclub/OpenPype/pull/3274) + +## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.8...3.10.0) + +### 📖 Documentation + +- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094) +- Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052) + +**🆕 New features** + +- General: OpenPype modules publish plugins are registered in host [\#3180](https://github.com/pypeclub/OpenPype/pull/3180) +- General: Creator plugins from addons can be registered [\#3179](https://github.com/pypeclub/OpenPype/pull/3179) +- Ftrack: Single image reviewable [\#3157](https://github.com/pypeclub/OpenPype/pull/3157) +- Nuke: Expose write attributes to settings [\#3123](https://github.com/pypeclub/OpenPype/pull/3123) +- Hiero: Initial frame publish support [\#3106](https://github.com/pypeclub/OpenPype/pull/3106) +- Unreal: Render Publishing [\#2917](https://github.com/pypeclub/OpenPype/pull/2917) +- AfterEffects: Implemented New Publisher [\#2838](https://github.com/pypeclub/OpenPype/pull/2838) +- Unreal: Rendering implementation [\#2410](https://github.com/pypeclub/OpenPype/pull/2410) + +**🚀 Enhancements** + +- Maya: FBX camera export [\#3253](https://github.com/pypeclub/OpenPype/pull/3253) +- General: updating common vendor `scriptmenu` to 1.5.2 [\#3246](https://github.com/pypeclub/OpenPype/pull/3246) +- Project Manager: Allow to paste Tasks into multiple assets at the same time [\#3226](https://github.com/pypeclub/OpenPype/pull/3226) +- Project manager: Sped up project load [\#3216](https://github.com/pypeclub/OpenPype/pull/3216) +- Loader UI: Speed issues of loader with sync server [\#3199](https://github.com/pypeclub/OpenPype/pull/3199) +- Looks: add basic support for Renderman [\#3190](https://github.com/pypeclub/OpenPype/pull/3190) +- Maya: added clean\_import option to Import loader [\#3181](https://github.com/pypeclub/OpenPype/pull/3181) +- Add the scripts menu definition to nuke [\#3168](https://github.com/pypeclub/OpenPype/pull/3168) +- Maya: add maya 2023 to default applications
[\#3167](https://github.com/pypeclub/OpenPype/pull/3167) +- Compressed bgeo publishing in SAP and Houdini loader [\#3153](https://github.com/pypeclub/OpenPype/pull/3153) +- General: Add 'dataclasses' to required python modules [\#3149](https://github.com/pypeclub/OpenPype/pull/3149) +- Hooks: Tweak logging grammar [\#3147](https://github.com/pypeclub/OpenPype/pull/3147) +- Nuke: settings for reformat node in CreateWriteRender node [\#3143](https://github.com/pypeclub/OpenPype/pull/3143) +- Houdini: Add loader for alembic through Alembic Archive node [\#3140](https://github.com/pypeclub/OpenPype/pull/3140) +- Publisher: UI Modifications and fixes [\#3139](https://github.com/pypeclub/OpenPype/pull/3139) +- General: Simplified OP modules/addons import [\#3137](https://github.com/pypeclub/OpenPype/pull/3137) +- Terminal: Tweak coloring of TrayModuleManager logging enabled states [\#3133](https://github.com/pypeclub/OpenPype/pull/3133) +- General: Cleanup some Loader docstrings [\#3131](https://github.com/pypeclub/OpenPype/pull/3131) +- Nuke: render instance with subset name filtered overrides [\#3117](https://github.com/pypeclub/OpenPype/pull/3117) +- Unreal: Layout and Camera update and remove functions reimplemented and improvements [\#3116](https://github.com/pypeclub/OpenPype/pull/3116) +- Settings: Remove environment groups from settings [\#3115](https://github.com/pypeclub/OpenPype/pull/3115) +- TVPaint: Match renderlayer key with other hosts [\#3110](https://github.com/pypeclub/OpenPype/pull/3110) +- Ftrack: AssetVersion status on publish [\#3108](https://github.com/pypeclub/OpenPype/pull/3108) +- Tray publisher: Simple families from settings [\#3105](https://github.com/pypeclub/OpenPype/pull/3105) +- Local Settings UI: Overlay messages on save and reset [\#3104](https://github.com/pypeclub/OpenPype/pull/3104) +- General: Remove repos related logic [\#3087](https://github.com/pypeclub/OpenPype/pull/3087) +- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080) +- Houdini: Fix FPS + outdated content pop-ups [\#3079](https://github.com/pypeclub/OpenPype/pull/3079) +- General: Add global log verbose arguments [\#3070](https://github.com/pypeclub/OpenPype/pull/3070) +- Flame: extract presets distribution [\#3063](https://github.com/pypeclub/OpenPype/pull/3063) +- Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055) +- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) +- Maya: Implement Hardware Renderer 2.0 support for Render Products [\#2611](https://github.com/pypeclub/OpenPype/pull/2611) + +**🐛 Bug fixes** + +- nuke: use framerange issue [\#3254](https://github.com/pypeclub/OpenPype/pull/3254) +- Ftrack: Chunk sizes for queries have a minimal condition [\#3244](https://github.com/pypeclub/OpenPype/pull/3244) +- Maya: renderman displays need to be filtered [\#3242](https://github.com/pypeclub/OpenPype/pull/3242) +- Ftrack: Validate that the user exists on ftrack [\#3237](https://github.com/pypeclub/OpenPype/pull/3237) +- Maya: Fix support for multiple resolutions [\#3236](https://github.com/pypeclub/OpenPype/pull/3236) +- TVPaint: Look for more groups than 12 [\#3228](https://github.com/pypeclub/OpenPype/pull/3228) +- Hiero: debugging frame range and other 3.10 [\#3222](https://github.com/pypeclub/OpenPype/pull/3222) +- Project Manager: Fix persistent editors on project change
[\#3218](https://github.com/pypeclub/OpenPype/pull/3218) +- Deadline: instance data overwrite fix [\#3214](https://github.com/pypeclub/OpenPype/pull/3214) +- Ftrack: Push hierarchical attributes action works [\#3210](https://github.com/pypeclub/OpenPype/pull/3210) +- Standalone Publisher: Always create new representation for thumbnail [\#3203](https://github.com/pypeclub/OpenPype/pull/3203) +- Photoshop: skip collector when automatic testing [\#3202](https://github.com/pypeclub/OpenPype/pull/3202) +- Nuke: render/workfile version sync doesn't work on farm [\#3185](https://github.com/pypeclub/OpenPype/pull/3185) +- Ftrack: Review image only if there are no mp4 reviews [\#3183](https://github.com/pypeclub/OpenPype/pull/3183) +- Ftrack: Locations deepcopy issue [\#3177](https://github.com/pypeclub/OpenPype/pull/3177) +- General: Avoid creating multiple thumbnails [\#3176](https://github.com/pypeclub/OpenPype/pull/3176) +- General/Hiero: better clip duration calculation [\#3169](https://github.com/pypeclub/OpenPype/pull/3169) +- General: Oiio conversion for ffmpeg checks for invalid characters [\#3166](https://github.com/pypeclub/OpenPype/pull/3166) +- Fix for attaching render to subset [\#3164](https://github.com/pypeclub/OpenPype/pull/3164) +- Harmony: fixed missing task name in render instance [\#3163](https://github.com/pypeclub/OpenPype/pull/3163) +- Ftrack: Action delete old versions formatting works [\#3152](https://github.com/pypeclub/OpenPype/pull/3152) +- Deadline: fix the output directory [\#3144](https://github.com/pypeclub/OpenPype/pull/3144) +- General: New Session schema [\#3141](https://github.com/pypeclub/OpenPype/pull/3141) +- General: Missing version on headless mode crashes properly [\#3136](https://github.com/pypeclub/OpenPype/pull/3136) +- TVPaint: Composite layers in reversed order [\#3135](https://github.com/pypeclub/OpenPype/pull/3135) +- Nuke: fixing default settings for workfile builder loaders [\#3120](https://github.com/pypeclub/OpenPype/pull/3120) +- Nuke: fix anatomy imageio regex default [\#3119](https://github.com/pypeclub/OpenPype/pull/3119) +- General: Python 3 compatibility in queries [\#3112](https://github.com/pypeclub/OpenPype/pull/3112) +- General: TemplateResult can be copied [\#3099](https://github.com/pypeclub/OpenPype/pull/3099) +- General: Collect loaded versions skips non-existing representations [\#3095](https://github.com/pypeclub/OpenPype/pull/3095) +- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091) +- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089) +- Maya: Collect Render fix any render cameras check [\#3088](https://github.com/pypeclub/OpenPype/pull/3088) +- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083) +- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077) +- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076) +- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068) +- Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060) +- Fix support for Renderman in Maya [\#3006](https://github.com/pypeclub/OpenPype/pull/3006) + +**🔀 Refactored code** + +- Avalon repo removed from Jobs workflow [\#3193](https://github.com/pypeclub/OpenPype/pull/3193) +- General:
Remove remaining imports from avalon [\#3130](https://github.com/pypeclub/OpenPype/pull/3130) +- General: Move mongo db logic and remove avalon repository [\#3066](https://github.com/pypeclub/OpenPype/pull/3066) +- General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009) + +**Merged pull requests:** + +- Harmony: message length in 21.1 [\#3257](https://github.com/pypeclub/OpenPype/pull/3257) +- Harmony: 21.1 fix [\#3249](https://github.com/pypeclub/OpenPype/pull/3249) +- Maya: added jpg to filter for Image Plane Loader [\#3223](https://github.com/pypeclub/OpenPype/pull/3223) +- Webpublisher: replace space by underscore in subset names [\#3160](https://github.com/pypeclub/OpenPype/pull/3160) +- StandalonePublisher: removed Extract Background plugins [\#3093](https://github.com/pypeclub/OpenPype/pull/3093) +- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078) +- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065) +- SiteSync: Download all workfile inputs [\#2966](https://github.com/pypeclub/OpenPype/pull/2966) +- Photoshop: New Publisher [\#2933](https://github.com/pypeclub/OpenPype/pull/2933) +- Bump pillow from 9.0.0 to 9.0.1 [\#2880](https://github.com/pypeclub/OpenPype/pull/2880) +- AfterEffects: Allow configuration of default variant via Settings [\#2856](https://github.com/pypeclub/OpenPype/pull/2856) + +## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.7...3.9.8) + +## [3.9.7](https://github.com/pypeclub/OpenPype/tree/3.9.7) (2022-05-11) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.6...3.9.7) + +## [3.9.6](https://github.com/pypeclub/OpenPype/tree/3.9.6) (2022-05-03) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.5...3.9.6) + +## [3.9.5](https://github.com/pypeclub/OpenPype/tree/3.9.5) (2022-04-25) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...3.9.5) + +## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.3...3.9.4) + +### 📖 Documentation + +- Documentation: more info about Tasks [\#3062](https://github.com/pypeclub/OpenPype/pull/3062) +- Documentation: Python requirements to 3.7.9 [\#3035](https://github.com/pypeclub/OpenPype/pull/3035) +- Website Docs: Remove unused pages [\#2974](https://github.com/pypeclub/OpenPype/pull/2974) + +**🆕 New features** + +- General: Local overrides for environment variables [\#3045](https://github.com/pypeclub/OpenPype/pull/3045) +- Flame: Flare integration preparation [\#2928](https://github.com/pypeclub/OpenPype/pull/2928) + +**🚀 Enhancements** + +- TVPaint: Added init file for worker to trigger missing sound file dialog [\#3053](https://github.com/pypeclub/OpenPype/pull/3053) +- Ftrack: Custom attributes can be filled in slate values [\#3036](https://github.com/pypeclub/OpenPype/pull/3036) +- Resolve environment variable in google drive credential path [\#3008](https://github.com/pypeclub/OpenPype/pull/3008) + +**🐛 Bug fixes** + +- GitHub: Updated push-protected action in github workflow [\#3064](https://github.com/pypeclub/OpenPype/pull/3064) +- Nuke: Typos in imports from Nuke implementation [\#3061](https://github.com/pypeclub/OpenPype/pull/3061) +- Hotfix: fixing deadline job publishing [\#3059](https://github.com/pypeclub/OpenPype/pull/3059) +- General: Extract Review handle
invalid characters for ffmpeg [\#3050](https://github.com/pypeclub/OpenPype/pull/3050) +- Slate Review: Support to keep format on slate concatenation [\#3049](https://github.com/pypeclub/OpenPype/pull/3049) +- Webpublisher: fix processing of workfile [\#3048](https://github.com/pypeclub/OpenPype/pull/3048) +- Ftrack: Integrate ftrack api fix [\#3044](https://github.com/pypeclub/OpenPype/pull/3044) +- Webpublisher - removed wrong hardcoded family [\#3043](https://github.com/pypeclub/OpenPype/pull/3043) +- LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042) +- SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041) +- Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) +- SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) +- Maya: invalid review flag on rendered AOVs [\#2915](https://github.com/pypeclub/OpenPype/pull/2915) + +**Merged pull requests:** + +- Deadline: reworked pools assignment [\#3051](https://github.com/pypeclub/OpenPype/pull/3051) +- Houdini: Avoid ImportError on `hdefereval` when Houdini runs without UI [\#2987](https://github.com/pypeclub/OpenPype/pull/2987) + +## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3) + +### 📖 Documentation + +- Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999) +- Website Docs: Manager Ftrack fix broken links [\#2979](https://github.com/pypeclub/OpenPype/pull/2979) +- Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951) +- Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896) + +**🆕 New features** + +- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) +- nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992) +- Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) +- Multiverse: Initial Support [\#2908](https://github.com/pypeclub/OpenPype/pull/2908) + +**🚀 Enhancements** + +- General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011) +- Ftrack: Add more options for note text of integrate ftrack note [\#3025](https://github.com/pypeclub/OpenPype/pull/3025) +- Console Interpreter: Changed how console splitter sizes are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) +- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) +- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) +- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) +- TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) +- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) +- Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) +- General: `METADATA_KEYS` constant as `frozenset` for optimal immutable lookup
[\#2980](https://github.com/pypeclub/OpenPype/pull/2980) +- General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975) +- Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967) +- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) +- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) +- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942) +- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) +- Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) +- General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923) +- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) +- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) +- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) + +**🐛 Bug fixes** + +- General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029) +- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) +- Settings UI: Version column can be extended so versions are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) +- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) +- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) +- AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023) +- General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022) +- Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017) +- Ftrack: multiple reviewable components [\#3012](https://github.com/pypeclub/OpenPype/pull/3012) +- Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010) +- Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004) +- Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) +- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998) +- Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) +- PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991) +- Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990) +- AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989) +- Maya: Don't do hardlinks on windows for look publishing [\#2986](https://github.com/pypeclub/OpenPype/pull/2986) +- Settings UI: Fix version completer
on linux [\#2981](https://github.com/pypeclub/OpenPype/pull/2981) +- Photoshop: Fix creation of subset names in PS review and workfile [\#2969](https://github.com/pypeclub/OpenPype/pull/2969) +- Slack: Added default for review\_upload\_limit for Slack [\#2965](https://github.com/pypeclub/OpenPype/pull/2965) +- General: OIIO conversion for ffmpeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958) +- Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956) +- General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950) +- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) +- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948) +- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) +- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) +- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) +- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) +- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) +- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) +- Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932) +- General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926) +- Hiero: Fix import of 'register\_event\_callback' [\#2924](https://github.com/pypeclub/OpenPype/pull/2924) +- Flame: centos related debugging [\#2922](https://github.com/pypeclub/OpenPype/pull/2922) +- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) + +**🔀 Refactored code** + +- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935) +- General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931) +- General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927) +- General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918) +- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) +- General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912) + +**Merged pull requests:** + +- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) +- Bump paramiko from 2.9.2 to 2.10.1 [\#2973](https://github.com/pypeclub/OpenPype/pull/2973) +- Bump minimist from 1.2.5 to 1.2.6 in /website [\#2954](https://github.com/pypeclub/OpenPype/pull/2954) +- Bump node-forge from 1.2.1 to 1.3.0 in /website [\#2953](https://github.com/pypeclub/OpenPype/pull/2953) +- Maya - added transparency into review creator [\#2952](https://github.com/pypeclub/OpenPype/pull/2952) + +## [3.9.2](https://github.com/pypeclub/OpenPype/tree/3.9.2) (2022-04-04) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...3.9.2) + +##
[3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) + +**🚀 Enhancements** + +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) +- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) +- Nuke: ExtractReviewSlate can handle more codecs and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) +- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) + +**🐛 Bug fixes** + +- General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) +- Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) +- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) +- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) +- Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) +- General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) +- General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) +- Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) +- Flame Babypublisher optimization [\#2806](https://github.com/pypeclub/OpenPype/pull/2806) +- hotfix: OIIO tool path - add extension on windows [\#2618](https://github.com/pypeclub/OpenPype/pull/2618) + +**🔀 Refactored code** + +- General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) +- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) + +## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.2...3.9.0) + +**Deprecated:** + +- Houdini: Remove unused code [\#2779](https://github.com/pypeclub/OpenPype/pull/2779) +- Loader: Remove default family states for hosts from code [\#2706](https://github.com/pypeclub/OpenPype/pull/2706) +- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845) + +### 📖 Documentation + +- Documentation: fixed broken links [\#2799](https://github.com/pypeclub/OpenPype/pull/2799) +- Documentation: broken link fix [\#2785](https://github.com/pypeclub/OpenPype/pull/2785) +- Documentation: link fixes [\#2772](https://github.com/pypeclub/OpenPype/pull/2772) +- Update docusaurus to latest version [\#2760](https://github.com/pypeclub/OpenPype/pull/2760) +- Various testing updates [\#2726](https://github.com/pypeclub/OpenPype/pull/2726) +- documentation: add example to `repack-version` command [\#2669](https://github.com/pypeclub/OpenPype/pull/2669) +- Update docusaurus [\#2639](https://github.com/pypeclub/OpenPype/pull/2639) +- Documentation: Fixed relative links [\#2621](https://github.com/pypeclub/OpenPype/pull/2621) +- Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) + +**🆕 New features** + +- Flame: loading clips to reels
[\#2622](https://github.com/pypeclub/OpenPype/pull/2622) +- General: Store settings by OpenPype version [\#2570](https://github.com/pypeclub/OpenPype/pull/2570) + +**🚀 Enhancements** + +- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841) +- General: Set context environments for non host applications [\#2803](https://github.com/pypeclub/OpenPype/pull/2803) +- Houdini: Remove duplicate ValidateOutputNode plug-in [\#2780](https://github.com/pypeclub/OpenPype/pull/2780) +- Tray publisher: New Tray Publisher host \(beta\) [\#2778](https://github.com/pypeclub/OpenPype/pull/2778) +- Slack: Added regex for filtering on subset names [\#2775](https://github.com/pypeclub/OpenPype/pull/2775) +- Houdini: Implement Reset Frame Range [\#2770](https://github.com/pypeclub/OpenPype/pull/2770) +- Pyblish Pype: Remove redundant new line in installed fonts printing [\#2758](https://github.com/pypeclub/OpenPype/pull/2758) +- Flame: use Shot Name on segment for asset name [\#2751](https://github.com/pypeclub/OpenPype/pull/2751) +- Flame: adding validator source clip [\#2746](https://github.com/pypeclub/OpenPype/pull/2746) +- Work Files: Preserve subversion comment of current filename by default [\#2734](https://github.com/pypeclub/OpenPype/pull/2734) +- Maya: set Deadline job/batch name to original source workfile name instead of published workfile [\#2733](https://github.com/pypeclub/OpenPype/pull/2733) +- Ftrack: Disable ftrack module by default [\#2732](https://github.com/pypeclub/OpenPype/pull/2732) +- Project Manager: Disable add task, add asset and save button when not in a project [\#2727](https://github.com/pypeclub/OpenPype/pull/2727) +- dropbox handle big file [\#2718](https://github.com/pypeclub/OpenPype/pull/2718) +- Fusion Move PR: Minor tweaks to Fusion integration [\#2716](https://github.com/pypeclub/OpenPype/pull/2716) +- RoyalRender: Minor enhancements [\#2700](https://github.com/pypeclub/OpenPype/pull/2700) +- Nuke: prerender with review knob [\#2691](https://github.com/pypeclub/OpenPype/pull/2691) +- Maya configurable unit validator [\#2680](https://github.com/pypeclub/OpenPype/pull/2680) +- General: Add settings for CleanUpFarm and disable the plugin by default [\#2679](https://github.com/pypeclub/OpenPype/pull/2679) +- Project Manager: Only allow scroll wheel edits when spinbox is active [\#2678](https://github.com/pypeclub/OpenPype/pull/2678) +- Ftrack: Sync description to assets [\#2670](https://github.com/pypeclub/OpenPype/pull/2670) +- Houdini: Moved to OpenPype [\#2658](https://github.com/pypeclub/OpenPype/pull/2658) +- Maya: Move implementation to OpenPype [\#2649](https://github.com/pypeclub/OpenPype/pull/2649) +- General: FFmpeg conversion also check attribute string length [\#2635](https://github.com/pypeclub/OpenPype/pull/2635) +- Houdini: Load Arnold .ass procedurals into Houdini [\#2606](https://github.com/pypeclub/OpenPype/pull/2606) +- Deadline: Simplify GlobalJobPreLoad logic [\#2605](https://github.com/pypeclub/OpenPype/pull/2605) +- Houdini: Implement Arnold .ass standin extraction from Houdini \(also support .ass.gz\) [\#2603](https://github.com/pypeclub/OpenPype/pull/2603) +- New Publisher: New features and preparations for new standalone publisher [\#2556](https://github.com/pypeclub/OpenPype/pull/2556) +- Fix Maya 2022 Python 3 compatibility [\#2445](https://github.com/pypeclub/OpenPype/pull/2445) +- TVPaint: Use new publisher exceptions in validators [\#2435](https://github.com/pypeclub/OpenPype/pull/2435) +- Harmony: Added new style 
validations for New Publisher [\#2434](https://github.com/pypeclub/OpenPype/pull/2434) +- Aftereffects: New style validations for New publisher [\#2430](https://github.com/pypeclub/OpenPype/pull/2430) +- Farm publishing: New cleanup plugin for Maya renders on farm [\#2390](https://github.com/pypeclub/OpenPype/pull/2390) +- General: Subset name filtering in ExtractReview outputs [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) +- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867) +- NewPublisher: Changing task on publishing instance [\#2863](https://github.com/pypeclub/OpenPype/pull/2863) +- TrayPublisher: Choose project widget is clearer [\#2859](https://github.com/pypeclub/OpenPype/pull/2859) +- Maya: add loaded containers to published instance [\#2837](https://github.com/pypeclub/OpenPype/pull/2837) +- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836) +- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822) +- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817) +- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812) +- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811) +- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805) +- Houdini: Move Houdini Save Current File to beginning of ExtractorOrder [\#2747](https://github.com/pypeclub/OpenPype/pull/2747) +- Global: adding studio name/code to anatomy template formatting data [\#2630](https://github.com/pypeclub/OpenPype/pull/2630) + +**🐛 Bug fixes** + +- Settings UI: Search case sensitivity [\#2810](https://github.com/pypeclub/OpenPype/pull/2810) +- resolve: fixing fusion module loading [\#2802](https://github.com/pypeclub/OpenPype/pull/2802) +- Ftrack: Unset task ids from asset versions before tasks are removed [\#2800](https://github.com/pypeclub/OpenPype/pull/2800) +- Slack: fail gracefully if slack exception [\#2798](https://github.com/pypeclub/OpenPype/pull/2798) +- Flame: Fix version string in default settings [\#2783](https://github.com/pypeclub/OpenPype/pull/2783) +- After Effects: Fix typo in name `afftereffects` -\> `aftereffects` [\#2768](https://github.com/pypeclub/OpenPype/pull/2768) +- Houdini: Fix open last workfile [\#2767](https://github.com/pypeclub/OpenPype/pull/2767) +- Avoid renaming udim indexes [\#2765](https://github.com/pypeclub/OpenPype/pull/2765) +- Maya: Fix `unique_namespace` when in a namespace that is empty [\#2759](https://github.com/pypeclub/OpenPype/pull/2759) +- Loader UI: Fix right click in representation widget [\#2757](https://github.com/pypeclub/OpenPype/pull/2757) +- Harmony: Rendering in Deadline didn't work on machines other than the submitter [\#2754](https://github.com/pypeclub/OpenPype/pull/2754) +- Aftereffects 2022 and Deadline [\#2748](https://github.com/pypeclub/OpenPype/pull/2748) +- Flame: bunch of bugs [\#2745](https://github.com/pypeclub/OpenPype/pull/2745) +- Maya: Save current scene on workfile publish [\#2744](https://github.com/pypeclub/OpenPype/pull/2744) +- Version Up: Preserve parts of filename after version number \(like subversion\) on version\_up [\#2741](https://github.com/pypeclub/OpenPype/pull/2741) +- Loader UI: Multiple asset selection and underline colors fixed [\#2731](https://github.com/pypeclub/OpenPype/pull/2731) +- General: Fix
loading of unused chars in xml format [\#2729](https://github.com/pypeclub/OpenPype/pull/2729) +- TVPaint: Set objectName with members [\#2725](https://github.com/pypeclub/OpenPype/pull/2725) +- General: Don't use 'objectName' from loaded references [\#2715](https://github.com/pypeclub/OpenPype/pull/2715) +- Settings: Studio Project anatomy is queried using right keys [\#2711](https://github.com/pypeclub/OpenPype/pull/2711) +- Local Settings: Additional applications don't break UI [\#2710](https://github.com/pypeclub/OpenPype/pull/2710) +- Maya: Remove some unused code [\#2709](https://github.com/pypeclub/OpenPype/pull/2709) +- Houdini: Fix refactor of Houdini host move for CreateArnoldAss [\#2704](https://github.com/pypeclub/OpenPype/pull/2704) +- LookAssigner: Fix imports after moving code to OpenPype repository [\#2701](https://github.com/pypeclub/OpenPype/pull/2701) +- Multiple hosts: unify menu style across hosts [\#2693](https://github.com/pypeclub/OpenPype/pull/2693) +- Maya Redshift fixes [\#2692](https://github.com/pypeclub/OpenPype/pull/2692) +- Maya: fix fps validation popup [\#2685](https://github.com/pypeclub/OpenPype/pull/2685) +- Houdini: Explicitly collect correct frame name even in case of single frame render when `frameStart` is provided [\#2676](https://github.com/pypeclub/OpenPype/pull/2676) +- hiero: fix effect collector name and order [\#2673](https://github.com/pypeclub/OpenPype/pull/2673) +- Maya: Fix menu callbacks [\#2671](https://github.com/pypeclub/OpenPype/pull/2671) +- hiero: removing obsolete unsupported plugin [\#2667](https://github.com/pypeclub/OpenPype/pull/2667) +- Launcher: Fix access to 'data' attribute on actions [\#2659](https://github.com/pypeclub/OpenPype/pull/2659) +- Maya `vrscene` loader fixes [\#2633](https://github.com/pypeclub/OpenPype/pull/2633) +- Houdini: fix usd family in loader and integrators [\#2631](https://github.com/pypeclub/OpenPype/pull/2631) +- Maya: Add only reference node to look family container like with other families [\#2508](https://github.com/pypeclub/OpenPype/pull/2508) +- General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877) +- Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868) +- Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866) +- General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864) +- General: ffmpeg was crashing on slate merge [\#2860](https://github.com/pypeclub/OpenPype/pull/2860) +- WebPublisher: Video file was published with one frame too many [\#2858](https://github.com/pypeclub/OpenPype/pull/2858) +- New Publisher: Error dialog got right styles [\#2857](https://github.com/pypeclub/OpenPype/pull/2857) +- General: Fix getattr callback on dynamic modules [\#2855](https://github.com/pypeclub/OpenPype/pull/2855) +- Nuke: slate resolution to input video resolution [\#2853](https://github.com/pypeclub/OpenPype/pull/2853) +- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852) +- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851) +- Nuke: Fix family test in validate\_write\_legacy to work with stillImage [\#2847](https://github.com/pypeclub/OpenPype/pull/2847) +- Nuke: fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842) +- Blender: Fixed parameters for FBX export of the camera
[\#2840](https://github.com/pypeclub/OpenPype/pull/2840) +- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) +- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) +- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) +- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) +- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) +- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) +- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820) +- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819) +- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818) +- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816) + +**🔀 Refactored code** + +- Ftrack: Moved module one hierarchy level higher [\#2792](https://github.com/pypeclub/OpenPype/pull/2792) +- SyncServer: Moved module one hierarchy level higher [\#2791](https://github.com/pypeclub/OpenPype/pull/2791) +- Royal render: Move module one hierarchy level higher [\#2790](https://github.com/pypeclub/OpenPype/pull/2790) +- Deadline: Move module one hierarchy level higher [\#2789](https://github.com/pypeclub/OpenPype/pull/2789) +- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876) +- General: Move create logic from avalon to OpenPype [\#2854](https://github.com/pypeclub/OpenPype/pull/2854) +- General: Add vendors from avalon [\#2848](https://github.com/pypeclub/OpenPype/pull/2848) +- General: Basic event system [\#2846](https://github.com/pypeclub/OpenPype/pull/2846) +- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839) +- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829) +- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823) +- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766) + +**Merged pull requests:** + +- Fusion: Moved implementation into OpenPype [\#2713](https://github.com/pypeclub/OpenPype/pull/2713) +- TVPaint: Plugin build without dependencies [\#2705](https://github.com/pypeclub/OpenPype/pull/2705) +- Webpublisher: Photoshop create a beauty png [\#2689](https://github.com/pypeclub/OpenPype/pull/2689) +- Ftrack: Hierarchical attributes are queried properly [\#2682](https://github.com/pypeclub/OpenPype/pull/2682) +- Maya: Add Validate Frame Range settings [\#2661](https://github.com/pypeclub/OpenPype/pull/2661) +- Harmony: move to Openpype [\#2657](https://github.com/pypeclub/OpenPype/pull/2657) +- Maya: cleanup duplicate rendersetup code [\#2642](https://github.com/pypeclub/OpenPype/pull/2642) +- Deadline: Be able to pass Mongo url to job [\#2616](https://github.com/pypeclub/OpenPype/pull/2616) + +## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.1...3.8.2) + +### 📖 Documentation + +- Cosmetics: Fix common typos in openpype/website [\#2617](https://github.com/pypeclub/OpenPype/pull/2617) + +**🚀 
Enhancements** + +- TVPaint: Image loaders also work on review family [\#2638](https://github.com/pypeclub/OpenPype/pull/2638) +- General: Project backup tools [\#2629](https://github.com/pypeclub/OpenPype/pull/2629) +- nuke: adding clear button to write nodes [\#2627](https://github.com/pypeclub/OpenPype/pull/2627) +- Ftrack: Family to Asset type mapping is in settings [\#2602](https://github.com/pypeclub/OpenPype/pull/2602) +- Nuke: load color space from representation data [\#2576](https://github.com/pypeclub/OpenPype/pull/2576) + +**🐛 Bug fixes** + +- Fix pulling of cx\_freeze 6.10 [\#2628](https://github.com/pypeclub/OpenPype/pull/2628) +- Global: fix broken otio review extractor [\#2590](https://github.com/pypeclub/OpenPype/pull/2590) + +**Merged pull requests:** + +- WebPublisher: fix instance duplicates [\#2641](https://github.com/pypeclub/OpenPype/pull/2641) +- Fix - safer pulling of task name for webpublishing from PS [\#2613](https://github.com/pypeclub/OpenPype/pull/2613) + +## [3.8.1](https://github.com/pypeclub/OpenPype/tree/3.8.1) (2022-02-01) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.0...3.8.1) + +**🚀 Enhancements** + +- Webpublisher: Thumbnail extractor [\#2600](https://github.com/pypeclub/OpenPype/pull/2600) +- Loader: Allow to toggle default family filters between "include" or "exclude" filtering [\#2541](https://github.com/pypeclub/OpenPype/pull/2541) +- Launcher: Added context menu to to skip opening last workfile [\#2536](https://github.com/pypeclub/OpenPype/pull/2536) +- Unreal: JSON Layout Loading support [\#2066](https://github.com/pypeclub/OpenPype/pull/2066) + +**🐛 Bug fixes** + +- Release/3.8.0 [\#2619](https://github.com/pypeclub/OpenPype/pull/2619) +- Settings: Enum does not store empty string if has single item to select [\#2615](https://github.com/pypeclub/OpenPype/pull/2615) +- switch distutils to sysconfig for `get_platform()` [\#2594](https://github.com/pypeclub/OpenPype/pull/2594) +- Fix poetry index and speedcopy update [\#2589](https://github.com/pypeclub/OpenPype/pull/2589) +- Webpublisher: Fix - subset names from processed .psd used wrong value for task [\#2586](https://github.com/pypeclub/OpenPype/pull/2586) +- `vrscene` creator Deadline webservice URL handling [\#2580](https://github.com/pypeclub/OpenPype/pull/2580) +- global: track name was failing if duplicated root word in name [\#2568](https://github.com/pypeclub/OpenPype/pull/2568) +- Validate Maya Rig produces no cycle errors [\#2484](https://github.com/pypeclub/OpenPype/pull/2484) + +**Merged pull requests:** + +- Bump pillow from 8.4.0 to 9.0.0 [\#2595](https://github.com/pypeclub/OpenPype/pull/2595) +- Webpublisher: Skip version collect [\#2591](https://github.com/pypeclub/OpenPype/pull/2591) +- build\(deps\): bump pillow from 8.4.0 to 9.0.0 [\#2523](https://github.com/pypeclub/OpenPype/pull/2523) + +## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...3.8.0) + +### 📖 Documentation + +- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) + +**🆕 New features** + +- Flame: extracting segments with trans-coding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547) +- Maya : V-Ray Proxy - load all ABC files via proxy [\#2544](https://github.com/pypeclub/OpenPype/pull/2544) +- Maya to Unreal: Extended static mesh workflow [\#2537](https://github.com/pypeclub/OpenPype/pull/2537) +- Flame: collecting publishable instances 
[\#2519](https://github.com/pypeclub/OpenPype/pull/2519) +- Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) +- Flame: OpenTimelineIO Export Module [\#2398](https://github.com/pypeclub/OpenPype/pull/2398) + +**🚀 Enhancements** + +- Webpublisher: Moved error to the beginning of the log [\#2559](https://github.com/pypeclub/OpenPype/pull/2559) +- Ftrack: Use ApplicationManager to get DJV path [\#2558](https://github.com/pypeclub/OpenPype/pull/2558) +- Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555) +- Settings: PathInput strip passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550) +- Global: Extract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548) +- Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542) +- General: Validate if current process OpenPype version is requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529) +- General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525) +- Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521) +- Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510) +- TimersManager: Move module one hierarchy higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501) +- Slack: notifications are sent with Openpype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499) +- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498) +- Ftrack: Event handlers settings [\#2496](https://github.com/pypeclub/OpenPype/pull/2496) +- Tools: Fix style and modality of errors in loader and creator [\#2489](https://github.com/pypeclub/OpenPype/pull/2489) +- Maya: Collect 'fps' animation data only for "review" instances [\#2486](https://github.com/pypeclub/OpenPype/pull/2486) +- Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482) +- Tools: Be able to change models of tasks and assets widgets [\#2475](https://github.com/pypeclub/OpenPype/pull/2475) +- Publish pype: Reduce publish process deferring [\#2464](https://github.com/pypeclub/OpenPype/pull/2464) +- Maya: Improve speed of Collect History logic [\#2460](https://github.com/pypeclub/OpenPype/pull/2460) +- Maya: Validate Rig Controllers - fix Error: in script editor [\#2459](https://github.com/pypeclub/OpenPype/pull/2459) +- Maya: Validate NGONs simplify and speed-up [\#2458](https://github.com/pypeclub/OpenPype/pull/2458) +- Maya: Optimize Validate Locked Normals speed for dense polymeshes [\#2457](https://github.com/pypeclub/OpenPype/pull/2457) +- Maya: Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455) +- Houdini: Remove broken unique name counter [\#2450](https://github.com/pypeclub/OpenPype/pull/2450) +- Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447) +- General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425) +- Maya: add option to not group reference in ReferenceLoader [\#2383](https://github.com/pypeclub/OpenPype/pull/2383) + +**🐛 Bug fixes** + +- AfterEffects: Fix - removed obsolete import
[\#2577](https://github.com/pypeclub/OpenPype/pull/2577) +- General: OpenPype version updates [\#2575](https://github.com/pypeclub/OpenPype/pull/2575) +- Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563) +- Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560) +- General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557) +- Webpublisher: Fixed progress reporting [\#2553](https://github.com/pypeclub/OpenPype/pull/2553) +- Fix Maya AssProxyLoader version switch [\#2551](https://github.com/pypeclub/OpenPype/pull/2551) +- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) +- Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535) +- Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533) +- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532) +- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) +- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) +- Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518) +- Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513) +- Maya: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506) +- Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) +- General: Settings work if OpenPypeVersion is available [\#2494](https://github.com/pypeclub/OpenPype/pull/2494) +- General: PYTHONPATH may break OpenPype dependencies [\#2493](https://github.com/pypeclub/OpenPype/pull/2493) +- General: Modules import function output fix [\#2492](https://github.com/pypeclub/OpenPype/pull/2492) +- AE: fix hiding of alert window below Publish [\#2491](https://github.com/pypeclub/OpenPype/pull/2491) +- Workfiles tool: Files widget show files on first show [\#2488](https://github.com/pypeclub/OpenPype/pull/2488) +- General: Custom template paths filter fix [\#2483](https://github.com/pypeclub/OpenPype/pull/2483) +- Loader: Remove always on top flag in tray [\#2480](https://github.com/pypeclub/OpenPype/pull/2480) +- General: Anatomy does not return root envs as unicode [\#2465](https://github.com/pypeclub/OpenPype/pull/2465) +- Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) + +**Merged pull requests:** + +- AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) +- Maya: Remove Maya Look Assigner check on startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540) +- build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538) +- build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534) +- Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514) +- Maya: Vray fix proxies look assignment 
[\#2392](https://github.com/pypeclub/OpenPype/pull/2392) +- Bump algoliasearch-helper from 3.4.4 to 3.6.2 in /website [\#2297](https://github.com/pypeclub/OpenPype/pull/2297) + +## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.4...3.7.0) + +**Deprecated:** + +- General: Default modules hierarchy n2 [\#2368](https://github.com/pypeclub/OpenPype/pull/2368) + +### 📖 Documentation + +- docs\[website\]: Add Ellipse Studio \(logo\) as an OpenPype contributor [\#2324](https://github.com/pypeclub/OpenPype/pull/2324) + +**🆕 New features** + +- Settings UI use OpenPype styles [\#2296](https://github.com/pypeclub/OpenPype/pull/2296) +- Store typed version dependencies for workfiles [\#2192](https://github.com/pypeclub/OpenPype/pull/2192) +- OpenPypeV3: add key task type, task shortname and user to path templating construction [\#2157](https://github.com/pypeclub/OpenPype/pull/2157) +- Nuke: Alembic model workflow [\#2140](https://github.com/pypeclub/OpenPype/pull/2140) +- TVPaint: Load workfile from published. [\#1980](https://github.com/pypeclub/OpenPype/pull/1980) + +**🚀 Enhancements** + +- General: Workdir extra folders [\#2462](https://github.com/pypeclub/OpenPype/pull/2462) +- Photoshop: New style validations for New publisher [\#2429](https://github.com/pypeclub/OpenPype/pull/2429) +- General: Environment variables groups [\#2424](https://github.com/pypeclub/OpenPype/pull/2424) +- Unreal: Dynamic menu created in Python [\#2422](https://github.com/pypeclub/OpenPype/pull/2422) +- Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420) +- Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) +- TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) +- General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408) +- Ftrack: Check existence of object type on recreation [\#2404](https://github.com/pypeclub/OpenPype/pull/2404) +- Enhancement: Global cleanup plugin that explicitly remove paths from context [\#2402](https://github.com/pypeclub/OpenPype/pull/2402) +- General: MongoDB ability to specify replica set groups [\#2401](https://github.com/pypeclub/OpenPype/pull/2401) +- Flame: moving `utility_scripts` to api folder also with `scripts` [\#2385](https://github.com/pypeclub/OpenPype/pull/2385) +- Centos 7 dependency compatibility [\#2384](https://github.com/pypeclub/OpenPype/pull/2384) +- Enhancement: Settings: Use project settings values from another project [\#2382](https://github.com/pypeclub/OpenPype/pull/2382) +- Blender 3: Support auto install for new blender version [\#2377](https://github.com/pypeclub/OpenPype/pull/2377) +- Maya add render image path to settings [\#2375](https://github.com/pypeclub/OpenPype/pull/2375) +- Settings: Webpublisher in hosts enum [\#2367](https://github.com/pypeclub/OpenPype/pull/2367) +- Hiero: python3 compatibility [\#2365](https://github.com/pypeclub/OpenPype/pull/2365) +- Burnins: Be able recognize mxf OPAtom format [\#2361](https://github.com/pypeclub/OpenPype/pull/2361) +- Maya: Add is\_static\_image\_plane and is\_in\_all\_views option in imagePlaneLoader [\#2356](https://github.com/pypeclub/OpenPype/pull/2356) +- Local settings: Copyable studio paths [\#2349](https://github.com/pypeclub/OpenPype/pull/2349) +- Assets Widget: Clear model on project change 
[\#2345](https://github.com/pypeclub/OpenPype/pull/2345) +- General: OpenPype default modules hierarchy [\#2338](https://github.com/pypeclub/OpenPype/pull/2338) +- TVPaint: Move implementation to OpenPype [\#2336](https://github.com/pypeclub/OpenPype/pull/2336) +- General: FFprobe error exception contain original error message [\#2328](https://github.com/pypeclub/OpenPype/pull/2328) +- Resolve: Add experimental button to menu [\#2325](https://github.com/pypeclub/OpenPype/pull/2325) +- Hiero: Add experimental tools action [\#2323](https://github.com/pypeclub/OpenPype/pull/2323) +- Input links: Cleanup and unification of differences [\#2322](https://github.com/pypeclub/OpenPype/pull/2322) +- General: Don't validate vendor bin with executing them [\#2317](https://github.com/pypeclub/OpenPype/pull/2317) +- General: Multilayer EXRs support [\#2315](https://github.com/pypeclub/OpenPype/pull/2315) +- General: Run process log stderr as info log level [\#2309](https://github.com/pypeclub/OpenPype/pull/2309) +- General: Reduce vendor imports [\#2305](https://github.com/pypeclub/OpenPype/pull/2305) +- Tools: Cleanup of unused classes [\#2304](https://github.com/pypeclub/OpenPype/pull/2304) +- Project Manager: Added ability to delete project [\#2298](https://github.com/pypeclub/OpenPype/pull/2298) +- Ftrack: Synchronize input links [\#2287](https://github.com/pypeclub/OpenPype/pull/2287) +- StandalonePublisher: Remove unused plugin ExtractHarmonyZip [\#2277](https://github.com/pypeclub/OpenPype/pull/2277) +- Ftrack: Support multiple reviews [\#2271](https://github.com/pypeclub/OpenPype/pull/2271) +- Ftrack: Remove unused clean component plugin [\#2269](https://github.com/pypeclub/OpenPype/pull/2269) +- Royal Render: Support for rr channels in separate dirs [\#2268](https://github.com/pypeclub/OpenPype/pull/2268) +- Houdini: Add experimental tools action [\#2267](https://github.com/pypeclub/OpenPype/pull/2267) +- Nuke: extract baked review videos presets [\#2248](https://github.com/pypeclub/OpenPype/pull/2248) +- TVPaint: Workers rendering [\#2209](https://github.com/pypeclub/OpenPype/pull/2209) +- OpenPypeV3: Add key parent asset to path templating construction [\#2186](https://github.com/pypeclub/OpenPype/pull/2186) + +**🐛 Bug fixes** + +- TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471) +- Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) +- PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) +- Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416) +- AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412) +- Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406) +- General: Fix access to environments from default settings [\#2403](https://github.com/pypeclub/OpenPype/pull/2403) +- Fix: Placeholder Input color set fix [\#2399](https://github.com/pypeclub/OpenPype/pull/2399) +- Settings: Fix state change of wrapper label [\#2396](https://github.com/pypeclub/OpenPype/pull/2396) +- Flame: fix ftrack publisher [\#2381](https://github.com/pypeclub/OpenPype/pull/2381) +- hiero: solve custom ocio path [\#2379](https://github.com/pypeclub/OpenPype/pull/2379) +- hiero: fix workio and flatten [\#2378](https://github.com/pypeclub/OpenPype/pull/2378) +- Nuke: fixing 
menu re-drawing during context change [\#2374](https://github.com/pypeclub/OpenPype/pull/2374) +- Webpublisher: Fix assignment of families of TVpaint instances [\#2373](https://github.com/pypeclub/OpenPype/pull/2373) +- Nuke: fixing node name based on switched asset name [\#2369](https://github.com/pypeclub/OpenPype/pull/2369) +- JobQueue: Fix loading of settings [\#2362](https://github.com/pypeclub/OpenPype/pull/2362) +- Tools: Placeholder color [\#2359](https://github.com/pypeclub/OpenPype/pull/2359) +- Launcher: Minimize button on MacOs [\#2355](https://github.com/pypeclub/OpenPype/pull/2355) +- StandalonePublisher: Fix import of constant [\#2354](https://github.com/pypeclub/OpenPype/pull/2354) +- Houdini: Fix HDA creation [\#2350](https://github.com/pypeclub/OpenPype/pull/2350) +- Adobe products show issue [\#2347](https://github.com/pypeclub/OpenPype/pull/2347) +- Maya Look Assigner: Fix Python 3 compatibility [\#2343](https://github.com/pypeclub/OpenPype/pull/2343) +- Remove wrongly used host for hook [\#2342](https://github.com/pypeclub/OpenPype/pull/2342) +- Tools: Use Qt context on tools show [\#2340](https://github.com/pypeclub/OpenPype/pull/2340) +- Flame: Fix default argument value in custom dictionary [\#2339](https://github.com/pypeclub/OpenPype/pull/2339) +- Timers Manager: Disable auto stop timer on linux platform [\#2334](https://github.com/pypeclub/OpenPype/pull/2334) +- nuke: bake preset single input exception [\#2331](https://github.com/pypeclub/OpenPype/pull/2331) +- Hiero: fixing multiple templates at a hierarchy parent [\#2330](https://github.com/pypeclub/OpenPype/pull/2330) +- Fix - provider icons are pulled from a folder [\#2326](https://github.com/pypeclub/OpenPype/pull/2326) +- InputLinks: Typo in "inputLinks" key [\#2314](https://github.com/pypeclub/OpenPype/pull/2314) +- Deadline timeout and logging [\#2312](https://github.com/pypeclub/OpenPype/pull/2312) +- nuke: do not multiply representation on class method [\#2311](https://github.com/pypeclub/OpenPype/pull/2311) +- Workfiles tool: Fix task formatting [\#2306](https://github.com/pypeclub/OpenPype/pull/2306) +- Delivery: Fix delivery paths created on windows [\#2302](https://github.com/pypeclub/OpenPype/pull/2302) +- Maya: Deadline - fix limit groups [\#2295](https://github.com/pypeclub/OpenPype/pull/2295) +- Royal Render: Fix plugin order and OpenPype auto-detection [\#2291](https://github.com/pypeclub/OpenPype/pull/2291) +- New Publisher: Fix mapping of indexes [\#2285](https://github.com/pypeclub/OpenPype/pull/2285) +- Alternate site for site sync doesnt work for sequences [\#2284](https://github.com/pypeclub/OpenPype/pull/2284) +- FFmpeg: Execute ffprobe using list of arguments instead of string command [\#2281](https://github.com/pypeclub/OpenPype/pull/2281) +- Nuke: Anatomy fill data use task as dictionary [\#2278](https://github.com/pypeclub/OpenPype/pull/2278) +- Bug: fix variable name \_asset\_id in workfiles application [\#2274](https://github.com/pypeclub/OpenPype/pull/2274) +- Version handling fixes [\#2272](https://github.com/pypeclub/OpenPype/pull/2272) + +**Merged pull requests:** + +- Maya: Replaced PATH usage with vendored oiio path for maketx utility [\#2405](https://github.com/pypeclub/OpenPype/pull/2405) +- \[Fix\]\[MAYA\] Handle message type attribute within CollectLook [\#2394](https://github.com/pypeclub/OpenPype/pull/2394) +- Add validator to check correct version of extension for PS and AE [\#2387](https://github.com/pypeclub/OpenPype/pull/2387) +- Maya: configurable model top level 
validation [\#2321](https://github.com/pypeclub/OpenPype/pull/2321) +- Create test publish class for After Effects [\#2270](https://github.com/pypeclub/OpenPype/pull/2270) + +## [3.6.4](https://github.com/pypeclub/OpenPype/tree/3.6.4) (2021-11-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.3...3.6.4) + +**🐛 Bug fixes** + +- Nuke: inventory update removes all loaded read nodes [\#2294](https://github.com/pypeclub/OpenPype/pull/2294) + +## [3.6.3](https://github.com/pypeclub/OpenPype/tree/3.6.3) (2021-11-19) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.2...3.6.3) + +**🐛 Bug fixes** + +- Deadline: Fix publish targets [\#2280](https://github.com/pypeclub/OpenPype/pull/2280) + +## [3.6.2](https://github.com/pypeclub/OpenPype/tree/3.6.2) (2021-11-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.1...3.6.2) + +**🚀 Enhancements** + +- Tools: Assets widget [\#2265](https://github.com/pypeclub/OpenPype/pull/2265) +- SceneInventory: Choose loader in asset switcher [\#2262](https://github.com/pypeclub/OpenPype/pull/2262) +- Style: New fonts in OpenPype style [\#2256](https://github.com/pypeclub/OpenPype/pull/2256) +- Tools: SceneInventory in OpenPype [\#2255](https://github.com/pypeclub/OpenPype/pull/2255) +- Tools: Tasks widget [\#2251](https://github.com/pypeclub/OpenPype/pull/2251) +- Tools: Creator in OpenPype [\#2244](https://github.com/pypeclub/OpenPype/pull/2244) +- Added endpoint for configured extensions [\#2221](https://github.com/pypeclub/OpenPype/pull/2221) + +**🐛 Bug fixes** + +- Tools: Parenting of tools in Nuke and Hiero [\#2266](https://github.com/pypeclub/OpenPype/pull/2266) +- limiting validator to specific editorial hosts [\#2264](https://github.com/pypeclub/OpenPype/pull/2264) +- Tools: Select Context dialog attribute fix [\#2261](https://github.com/pypeclub/OpenPype/pull/2261) +- Maya: Render publishing fails on linux [\#2260](https://github.com/pypeclub/OpenPype/pull/2260) +- LookAssigner: Fix tool reopen [\#2259](https://github.com/pypeclub/OpenPype/pull/2259) +- Standalone: editorial not publishing thumbnails on all subsets [\#2258](https://github.com/pypeclub/OpenPype/pull/2258) +- Burnins: Support mxf metadata [\#2247](https://github.com/pypeclub/OpenPype/pull/2247) +- Maya: Support for configurable AOV separator characters [\#2197](https://github.com/pypeclub/OpenPype/pull/2197) +- Maya: texture colorspace modes in looks [\#2195](https://github.com/pypeclub/OpenPype/pull/2195) + +## [3.6.1](https://github.com/pypeclub/OpenPype/tree/3.6.1) (2021-11-16) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.0...3.6.1) + +**🐛 Bug fixes** + +- Loader doesn't allow changing of version before loading [\#2254](https://github.com/pypeclub/OpenPype/pull/2254) + +## [3.6.0](https://github.com/pypeclub/OpenPype/tree/3.6.0) (2021-11-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.5.0...3.6.0) + +### 📖 Documentation + +- Add alternative sites for Site Sync [\#2206](https://github.com/pypeclub/OpenPype/pull/2206) +- Add command line way of running site sync server [\#2188](https://github.com/pypeclub/OpenPype/pull/2188) + +**🆕 New features** + +- Add validate active site button to sync queue on a project [\#2176](https://github.com/pypeclub/OpenPype/pull/2176) +- Maya : Colorspace configuration [\#2170](https://github.com/pypeclub/OpenPype/pull/2170) +- Blender: Added support for audio [\#2168](https://github.com/pypeclub/OpenPype/pull/2168) +- Flame: a host basic integration 
[\#2165](https://github.com/pypeclub/OpenPype/pull/2165) +- Houdini: simple HDA workflow [\#2072](https://github.com/pypeclub/OpenPype/pull/2072) +- Basic Royal Render Integration ✨ [\#2061](https://github.com/pypeclub/OpenPype/pull/2061) +- Camera handling between Blender and Unreal [\#1988](https://github.com/pypeclub/OpenPype/pull/1988) +- switch PyQt5 for PySide2 [\#1744](https://github.com/pypeclub/OpenPype/pull/1744) + +**🚀 Enhancements** + +- Tools: Subset manager in OpenPype [\#2243](https://github.com/pypeclub/OpenPype/pull/2243) +- General: Skip module directories without init file [\#2239](https://github.com/pypeclub/OpenPype/pull/2239) +- General: Static interfaces [\#2238](https://github.com/pypeclub/OpenPype/pull/2238) +- Style: Fix transparent image in style [\#2235](https://github.com/pypeclub/OpenPype/pull/2235) +- Add a "following workfile versioning" option on publish [\#2225](https://github.com/pypeclub/OpenPype/pull/2225) +- Modules: Module can add cli commands [\#2224](https://github.com/pypeclub/OpenPype/pull/2224) +- Webpublisher: Separate webpublisher logic [\#2222](https://github.com/pypeclub/OpenPype/pull/2222) +- Add both side availability on Site Sync sites to Loader [\#2220](https://github.com/pypeclub/OpenPype/pull/2220) +- Tools: Center loader and library loader on show [\#2219](https://github.com/pypeclub/OpenPype/pull/2219) +- Maya : Validate shape zero [\#2212](https://github.com/pypeclub/OpenPype/pull/2212) +- Maya : validate unique names [\#2211](https://github.com/pypeclub/OpenPype/pull/2211) +- Tools: OpenPype stylesheet in workfiles tool [\#2208](https://github.com/pypeclub/OpenPype/pull/2208) +- Ftrack: Replace Queue with deque in event handlers logic [\#2204](https://github.com/pypeclub/OpenPype/pull/2204) +- Tools: New select context dialog [\#2200](https://github.com/pypeclub/OpenPype/pull/2200) +- Maya : Validate mesh ngons [\#2199](https://github.com/pypeclub/OpenPype/pull/2199) +- Dirmap in Nuke [\#2198](https://github.com/pypeclub/OpenPype/pull/2198) +- Delivery: Check 'frame' key in template for sequence delivery [\#2196](https://github.com/pypeclub/OpenPype/pull/2196) +- Settings: Site sync project settings improvement [\#2193](https://github.com/pypeclub/OpenPype/pull/2193) +- Usage of tools code [\#2185](https://github.com/pypeclub/OpenPype/pull/2185) +- Settings: Dictionary based on project roots [\#2184](https://github.com/pypeclub/OpenPype/pull/2184) +- Subset name: Be able to pass asset document to get subset name [\#2179](https://github.com/pypeclub/OpenPype/pull/2179) +- Tools: Experimental tools [\#2167](https://github.com/pypeclub/OpenPype/pull/2167) +- Loader: Refactor and use OpenPype stylesheets [\#2166](https://github.com/pypeclub/OpenPype/pull/2166) +- Add loader for linked smart objects in photoshop [\#2149](https://github.com/pypeclub/OpenPype/pull/2149) +- Burnins: DNxHD profiles handling [\#2142](https://github.com/pypeclub/OpenPype/pull/2142) +- Tools: Single access point for host tools [\#2139](https://github.com/pypeclub/OpenPype/pull/2139) + +**🐛 Bug fixes** + +- Ftrack: Sync project ftrack id cache issue [\#2250](https://github.com/pypeclub/OpenPype/pull/2250) +- Ftrack: Session creation and Prepare project [\#2245](https://github.com/pypeclub/OpenPype/pull/2245) +- Added queue for studio processing in PS [\#2237](https://github.com/pypeclub/OpenPype/pull/2237) +- Python 2: Unicode to string conversion [\#2236](https://github.com/pypeclub/OpenPype/pull/2236) +- Fix - enum for color coding in PS 
[\#2234](https://github.com/pypeclub/OpenPype/pull/2234) +- Pyblish Tool: Fix targets handling [\#2232](https://github.com/pypeclub/OpenPype/pull/2232) +- Ftrack: Base event fix of 'get\_project\_from\_entity' method [\#2214](https://github.com/pypeclub/OpenPype/pull/2214) +- Maya : multiple subsets review broken [\#2210](https://github.com/pypeclub/OpenPype/pull/2210) +- Fix - different command used for Linux and Mac OS [\#2207](https://github.com/pypeclub/OpenPype/pull/2207) +- Tools: Workfiles tool don't use avalon widgets [\#2205](https://github.com/pypeclub/OpenPype/pull/2205) +- Ftrack: Fill missing ftrack id on mongo project [\#2203](https://github.com/pypeclub/OpenPype/pull/2203) +- Project Manager: Fix copying of tasks [\#2191](https://github.com/pypeclub/OpenPype/pull/2191) +- StandalonePublisher: Source validator don't expect representations [\#2190](https://github.com/pypeclub/OpenPype/pull/2190) +- Blender: Fix trying to pack an image when the shader node has no texture [\#2183](https://github.com/pypeclub/OpenPype/pull/2183) +- Maya: review viewport settings [\#2177](https://github.com/pypeclub/OpenPype/pull/2177) +- MacOS: Launching of applications may cause Permissions error [\#2175](https://github.com/pypeclub/OpenPype/pull/2175) +- Maya: Aspect ratio [\#2174](https://github.com/pypeclub/OpenPype/pull/2174) +- Blender: Fix 'Deselect All' with object not in 'Object Mode' [\#2163](https://github.com/pypeclub/OpenPype/pull/2163) +- Tools: Stylesheets are applied after tool show [\#2161](https://github.com/pypeclub/OpenPype/pull/2161) +- Maya: Collect render - fix UNC path support 🐛 [\#2158](https://github.com/pypeclub/OpenPype/pull/2158) +- Maya: Fix hotbox broken by scriptsmenu [\#2151](https://github.com/pypeclub/OpenPype/pull/2151) +- Ftrack: Ignore save warnings exception in Prepare project action [\#2150](https://github.com/pypeclub/OpenPype/pull/2150) +- Loader thumbnails with smooth edges [\#2147](https://github.com/pypeclub/OpenPype/pull/2147) +- Added validator for source files for Standalone Publisher [\#2138](https://github.com/pypeclub/OpenPype/pull/2138) + +**Merged pull requests:** + +- Bump pillow from 8.2.0 to 8.3.2 [\#2162](https://github.com/pypeclub/OpenPype/pull/2162) +- Bump axios from 0.21.1 to 0.21.4 in /website [\#2059](https://github.com/pypeclub/OpenPype/pull/2059) + +## [3.5.0](https://github.com/pypeclub/OpenPype/tree/3.5.0) (2021-10-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.1...3.5.0) + +**Deprecated:** + +- Maya: Change mayaAscii family to mayaScene [\#2106](https://github.com/pypeclub/OpenPype/pull/2106) + +**🆕 New features** + +- Added project and task into context change message in Maya [\#2131](https://github.com/pypeclub/OpenPype/pull/2131) +- Add ExtractBurnin to photoshop review [\#2124](https://github.com/pypeclub/OpenPype/pull/2124) +- PYPE-1218 - changed namespace to contain subset name in Maya [\#2114](https://github.com/pypeclub/OpenPype/pull/2114) +- Added running configurable disk mapping command before start of OP [\#2091](https://github.com/pypeclub/OpenPype/pull/2091) +- SFTP provider [\#2073](https://github.com/pypeclub/OpenPype/pull/2073) +- Maya: Validate setdress top group [\#2068](https://github.com/pypeclub/OpenPype/pull/2068) +- Maya: Enable publishing render attrib sets \(e.g. 
V-Ray Displacement\) with model [\#1955](https://github.com/pypeclub/OpenPype/pull/1955) + +**🚀 Enhancements** + +- Maya: make rig validators configurable in settings [\#2137](https://github.com/pypeclub/OpenPype/pull/2137) +- Settings: Updated readme for entity types in settings [\#2132](https://github.com/pypeclub/OpenPype/pull/2132) +- Nuke: unified clip loader [\#2128](https://github.com/pypeclub/OpenPype/pull/2128) +- Settings UI: Project model refreshing and sorting [\#2104](https://github.com/pypeclub/OpenPype/pull/2104) +- Create Read From Rendered - Disable Relative paths by default [\#2093](https://github.com/pypeclub/OpenPype/pull/2093) +- Added choosing different dirmap mapping if workfile synched locally [\#2088](https://github.com/pypeclub/OpenPype/pull/2088) +- General: Remove IdleManager module [\#2084](https://github.com/pypeclub/OpenPype/pull/2084) +- Tray UI: Message box about missing settings defaults [\#2080](https://github.com/pypeclub/OpenPype/pull/2080) +- Tray UI: Show menu where first click happened [\#2079](https://github.com/pypeclub/OpenPype/pull/2079) +- Global: add global validators to settings [\#2078](https://github.com/pypeclub/OpenPype/pull/2078) +- Use CRF for burnin when available [\#2070](https://github.com/pypeclub/OpenPype/pull/2070) +- Project manager: Filter first item after selection of project [\#2069](https://github.com/pypeclub/OpenPype/pull/2069) +- Nuke: Adding `still` image family workflow [\#2064](https://github.com/pypeclub/OpenPype/pull/2064) +- Maya: validate authorized loaded plugins [\#2062](https://github.com/pypeclub/OpenPype/pull/2062) +- Tools: add support for pyenv on windows [\#2051](https://github.com/pypeclub/OpenPype/pull/2051) +- SyncServer: Dropbox Provider [\#1979](https://github.com/pypeclub/OpenPype/pull/1979) +- Burnin: Get data from context with defined keys. [\#1897](https://github.com/pypeclub/OpenPype/pull/1897) +- Timers manager: Get task time [\#1896](https://github.com/pypeclub/OpenPype/pull/1896) +- TVPaint: Option to stop timer on application exit. [\#1887](https://github.com/pypeclub/OpenPype/pull/1887) + +**🐛 Bug fixes** + +- Maya: fix model publishing [\#2130](https://github.com/pypeclub/OpenPype/pull/2130) +- Fix - oiiotool wasn't recognized even if present [\#2129](https://github.com/pypeclub/OpenPype/pull/2129) +- General: Disk mapping group [\#2120](https://github.com/pypeclub/OpenPype/pull/2120) +- Hiero: publishing effect first time makes wrong resources path [\#2115](https://github.com/pypeclub/OpenPype/pull/2115) +- Add startup script for Houdini Core. 
[\#2110](https://github.com/pypeclub/OpenPype/pull/2110) +- TVPaint: Behavior name of loop also accepts repeat [\#2109](https://github.com/pypeclub/OpenPype/pull/2109) +- Ftrack: Project settings save custom attributes skip unknown attributes [\#2103](https://github.com/pypeclub/OpenPype/pull/2103) +- Blender: Fix NoneType error when animation\_data is missing for a rig [\#2101](https://github.com/pypeclub/OpenPype/pull/2101) +- Fix broken import in sftp provider [\#2100](https://github.com/pypeclub/OpenPype/pull/2100) +- Global: Fix docstring on publish plugin extract review [\#2097](https://github.com/pypeclub/OpenPype/pull/2097) +- Delivery Action Files Sequence fix [\#2096](https://github.com/pypeclub/OpenPype/pull/2096) +- General: Cloud mongo ca certificate issue [\#2095](https://github.com/pypeclub/OpenPype/pull/2095) +- TVPaint: Creator use context from workfile [\#2087](https://github.com/pypeclub/OpenPype/pull/2087) +- Blender: fix texture missing when publishing blend files [\#2085](https://github.com/pypeclub/OpenPype/pull/2085) +- General: Startup validations oiio tool path fix on linux [\#2083](https://github.com/pypeclub/OpenPype/pull/2083) +- Deadline: Collect deadline server does not check existence of deadline key [\#2082](https://github.com/pypeclub/OpenPype/pull/2082) +- Blender: fixed Curves with modifiers in Rigs [\#2081](https://github.com/pypeclub/OpenPype/pull/2081) +- Nuke UI scaling [\#2077](https://github.com/pypeclub/OpenPype/pull/2077) +- Maya: Fix multi-camera renders [\#2065](https://github.com/pypeclub/OpenPype/pull/2065) +- Fix Sync Queue when project disabled [\#2063](https://github.com/pypeclub/OpenPype/pull/2063) + +**Merged pull requests:** + +- Bump pywin32 from 300 to 301 [\#2086](https://github.com/pypeclub/OpenPype/pull/2086) + +## [3.4.1](https://github.com/pypeclub/OpenPype/tree/3.4.1) (2021-09-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.0...3.4.1) + +**🆕 New features** + +- Settings: Flag project as deactivated and hide from tools' view [\#2008](https://github.com/pypeclub/OpenPype/pull/2008) + +**🚀 Enhancements** + +- General: Startup validations [\#2054](https://github.com/pypeclub/OpenPype/pull/2054) +- Nuke: proxy mode validator [\#2052](https://github.com/pypeclub/OpenPype/pull/2052) +- Ftrack: Removed ftrack interface [\#2049](https://github.com/pypeclub/OpenPype/pull/2049) +- Settings UI: Deferred set value on entity [\#2044](https://github.com/pypeclub/OpenPype/pull/2044) +- Loader: Families filtering [\#2043](https://github.com/pypeclub/OpenPype/pull/2043) +- Settings UI: Project view enhancements [\#2042](https://github.com/pypeclub/OpenPype/pull/2042) +- Settings for Nuke IncrementScriptVersion [\#2039](https://github.com/pypeclub/OpenPype/pull/2039) +- Loader & Library loader: Use tools from OpenPype [\#2038](https://github.com/pypeclub/OpenPype/pull/2038) +- Adding predefined project folders creation in PM [\#2030](https://github.com/pypeclub/OpenPype/pull/2030) +- WebserverModule: Removed interface of webserver module [\#2028](https://github.com/pypeclub/OpenPype/pull/2028) +- TimersManager: Removed interface of timers manager [\#2024](https://github.com/pypeclub/OpenPype/pull/2024) +- Feature Maya import asset from scene inventory [\#2018](https://github.com/pypeclub/OpenPype/pull/2018) + +**🐛 Bug fixes** + +- Timers manager: Typo fix [\#2058](https://github.com/pypeclub/OpenPype/pull/2058) +- Hiero: Editorial fixes [\#2057](https://github.com/pypeclub/OpenPype/pull/2057) +- Differentiate jpg sequences
from thumbnail [\#2056](https://github.com/pypeclub/OpenPype/pull/2056) +- FFmpeg: Split command to list does not work [\#2046](https://github.com/pypeclub/OpenPype/pull/2046) +- Removed shell flag in subprocess call [\#2045](https://github.com/pypeclub/OpenPype/pull/2045) + +**Merged pull requests:** + +- Bump prismjs from 1.24.0 to 1.25.0 in /website [\#2050](https://github.com/pypeclub/OpenPype/pull/2050) + +## [3.4.0](https://github.com/pypeclub/OpenPype/tree/3.4.0) (2021-09-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...3.4.0) + +### 📖 Documentation + +- Documentation: Ftrack launch argsuments update [\#2014](https://github.com/pypeclub/OpenPype/pull/2014) +- Nuke Quick Start / Tutorial [\#1952](https://github.com/pypeclub/OpenPype/pull/1952) +- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) + +**🆕 New features** + +- Nuke: Compatibility with Nuke 13 [\#2003](https://github.com/pypeclub/OpenPype/pull/2003) +- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) +- Blender: Improved assets handling [\#1615](https://github.com/pypeclub/OpenPype/pull/1615) + +**🚀 Enhancements** + +- Added possibility to configure of synchronization of workfile version… [\#2041](https://github.com/pypeclub/OpenPype/pull/2041) +- General: Task types in profiles [\#2036](https://github.com/pypeclub/OpenPype/pull/2036) +- Console interpreter: Handle invalid sizes on initialization [\#2022](https://github.com/pypeclub/OpenPype/pull/2022) +- Ftrack: Show OpenPype versions in event server status [\#2019](https://github.com/pypeclub/OpenPype/pull/2019) +- General: Staging icon [\#2017](https://github.com/pypeclub/OpenPype/pull/2017) +- Ftrack: Sync to avalon actions have jobs [\#2015](https://github.com/pypeclub/OpenPype/pull/2015) +- Modules: Connect method is not required [\#2009](https://github.com/pypeclub/OpenPype/pull/2009) +- Settings UI: Number with configurable steps [\#2001](https://github.com/pypeclub/OpenPype/pull/2001) +- Moving project folder structure creation out of ftrack module \#1989 [\#1996](https://github.com/pypeclub/OpenPype/pull/1996) +- Configurable items for providers without Settings [\#1987](https://github.com/pypeclub/OpenPype/pull/1987) +- Global: Example addons [\#1986](https://github.com/pypeclub/OpenPype/pull/1986) +- Standalone Publisher: Extract harmony zip handle workfile template [\#1982](https://github.com/pypeclub/OpenPype/pull/1982) +- Settings UI: Number sliders [\#1978](https://github.com/pypeclub/OpenPype/pull/1978) +- Workfiles: Support more workfile templates [\#1966](https://github.com/pypeclub/OpenPype/pull/1966) +- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) +- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) +- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962) +- Global: Settings defined by Addons/Modules [\#1959](https://github.com/pypeclub/OpenPype/pull/1959) +- CI: change release numbering triggers [\#1954](https://github.com/pypeclub/OpenPype/pull/1954) +- Global: Avalon Host name collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949) +- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948) +- Add 
face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) +- OpenPype: Add version validation and `--headless` mode and update progress 🔄 [\#1939](https://github.com/pypeclub/OpenPype/pull/1939) +- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888) +- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) + +**🐛 Bug fixes** + +- Workfiles tool: Task selection [\#2040](https://github.com/pypeclub/OpenPype/pull/2040) +- Ftrack: Delete old versions missing settings key [\#2037](https://github.com/pypeclub/OpenPype/pull/2037) +- Nuke: typo on a button [\#2034](https://github.com/pypeclub/OpenPype/pull/2034) +- Hiero: Fix "none" named tags [\#2033](https://github.com/pypeclub/OpenPype/pull/2033) +- FFmpeg: Subprocess arguments as list [\#2032](https://github.com/pypeclub/OpenPype/pull/2032) +- General: Fix Python 2 breaking line [\#2016](https://github.com/pypeclub/OpenPype/pull/2016) +- Bugfix/webpublisher task type [\#2006](https://github.com/pypeclub/OpenPype/pull/2006) +- Nuke thumbnails generated from middle of the sequence [\#1992](https://github.com/pypeclub/OpenPype/pull/1992) +- Nuke: last version from path gets correct version [\#1990](https://github.com/pypeclub/OpenPype/pull/1990) +- nuke, resolve, hiero: precollector order less than 0.5 [\#1984](https://github.com/pypeclub/OpenPype/pull/1984) +- Last workfile with multiple work templates [\#1981](https://github.com/pypeclub/OpenPype/pull/1981) +- Collectors order [\#1977](https://github.com/pypeclub/OpenPype/pull/1977) +- Stop timer was within validator order range.
[\#1975](https://github.com/pypeclub/OpenPype/pull/1975) +- Ftrack: arrow submodule has https url source [\#1974](https://github.com/pypeclub/OpenPype/pull/1974) +- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972) +- Deadline: Houdini plugins in different hierarchy [\#1970](https://github.com/pypeclub/OpenPype/pull/1970) +- Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967) +- Global: ExtractJpeg can handle filepaths with spaces [\#1961](https://github.com/pypeclub/OpenPype/pull/1961) +- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960) + +**Merged pull requests:** + +- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958) +- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) + +## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...3.3.1) + +**🐛 Bug fixes** + +- TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946) +- Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945) +- standalone: editorial shared object problem [\#1941](https://github.com/pypeclub/OpenPype/pull/1941) +- Bugfix nuke deadline app name [\#1928](https://github.com/pypeclub/OpenPype/pull/1928) + +## [3.3.0](https://github.com/pypeclub/OpenPype/tree/3.3.0) (2021-08-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.2.0...3.3.0) + +### 📖 Documentation + +- Standalone Publish of textures family [\#1834](https://github.com/pypeclub/OpenPype/pull/1834) + +**🆕 New features** + +- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923) +- Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901) + +**🚀 Enhancements** + +- Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940) +- Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927) +- Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925) +- Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920) +- Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919) +- submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900) +- Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899) +- Support nested studio plugins paths. [\#1898](https://github.com/pypeclub/OpenPype/pull/1898) +- Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892) +- Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891) +- Expose stop timer through rest api. [\#1886](https://github.com/pypeclub/OpenPype/pull/1886) +- TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) +- Allow Multiple Notes to run on tasks. 
[\#1882](https://github.com/pypeclub/OpenPype/pull/1882) +- Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) +- Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) +- Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) +- Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) +- Anatomy schema validation [\#1864](https://github.com/pypeclub/OpenPype/pull/1864) +- Ftrack prepare project structure [\#1861](https://github.com/pypeclub/OpenPype/pull/1861) +- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Independent general environments [\#1853](https://github.com/pypeclub/OpenPype/pull/1853) +- TVPaint Start Frame [\#1844](https://github.com/pypeclub/OpenPype/pull/1844) +- Ftrack push attributes action adds traceback to job [\#1843](https://github.com/pypeclub/OpenPype/pull/1843) +- Prepare project action enhance [\#1838](https://github.com/pypeclub/OpenPype/pull/1838) +- nuke: settings create missing default subsets [\#1829](https://github.com/pypeclub/OpenPype/pull/1829) +- Update poetry lock [\#1823](https://github.com/pypeclub/OpenPype/pull/1823) +- Settings: settings for plugins [\#1819](https://github.com/pypeclub/OpenPype/pull/1819) +- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) +- Maya: Deadline custom settings [\#1797](https://github.com/pypeclub/OpenPype/pull/1797) +- Maya: Shader name validation [\#1762](https://github.com/pypeclub/OpenPype/pull/1762) + +**🐛 Bug fixes** + +- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) +- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) +- Maya: Validate Model Name - repair accidental deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) +- Nuke: submit to farm failed due to `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926) +- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) +- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) +- Nuke: update video file crashing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) +- Fix - texture validators for workfiles trigger only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) +- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) +- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) +- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) +- Burnins: Use input's bitrate in h264 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) +- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) +- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) +- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) +- Normalize path returned from Workfiles.
[\#1880](https://github.com/pypeclub/OpenPype/pull/1880) +- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) +- imageio: fix grouping [\#1856](https://github.com/pypeclub/OpenPype/pull/1856) +- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) +- publisher: missing version in subset prop [\#1849](https://github.com/pypeclub/OpenPype/pull/1849) +- Ftrack type error fix in sync to avalon event handler [\#1845](https://github.com/pypeclub/OpenPype/pull/1845) +- Nuke: updating effects subset fail [\#1841](https://github.com/pypeclub/OpenPype/pull/1841) +- nuke: write render node skipped with crop [\#1836](https://github.com/pypeclub/OpenPype/pull/1836) +- Project folder structure overrides [\#1813](https://github.com/pypeclub/OpenPype/pull/1813) +- Maya: fix yeti settings path in extractor [\#1809](https://github.com/pypeclub/OpenPype/pull/1809) +- Failsafe for cross project containers. [\#1806](https://github.com/pypeclub/OpenPype/pull/1806) +- Houdini colector formatting keys fix [\#1802](https://github.com/pypeclub/OpenPype/pull/1802) +- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) +- Application launch stdout/stderr in GUI build [\#1684](https://github.com/pypeclub/OpenPype/pull/1684) +- Nuke: re-use instance nodes output path [\#1577](https://github.com/pypeclub/OpenPype/pull/1577) + +**Merged pull requests:** + +- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) +- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) +- Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) +- Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) +- PS, AE - send actual context when another webserver is running [\#1811](https://github.com/pypeclub/OpenPype/pull/1811) + +## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.4...3.2.0) + +### 📖 Documentation + +- Fix: staging and `--use-version` option [\#1786](https://github.com/pypeclub/OpenPype/pull/1786) +- Subset template and TVPaint subset template docs [\#1717](https://github.com/pypeclub/OpenPype/pull/1717) +- Overscan color extract review [\#1701](https://github.com/pypeclub/OpenPype/pull/1701) + +**🚀 Enhancements** + +- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) +- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) +- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) +- Settings conditional dict [\#1777](https://github.com/pypeclub/OpenPype/pull/1777) +- Settings application use python 2 only where needed [\#1776](https://github.com/pypeclub/OpenPype/pull/1776) +- Settings UI copy/paste [\#1769](https://github.com/pypeclub/OpenPype/pull/1769) +- Workfile tool widths [\#1766](https://github.com/pypeclub/OpenPype/pull/1766) +- Push hierarchical attributes care about task parent changes [\#1763](https://github.com/pypeclub/OpenPype/pull/1763) +- Application executables with environment variables [\#1757](https://github.com/pypeclub/OpenPype/pull/1757) +- Deadline: Nuke submission additional attributes 
[\#1756](https://github.com/pypeclub/OpenPype/pull/1756) +- Settings schema without prefill [\#1753](https://github.com/pypeclub/OpenPype/pull/1753) +- Settings Hosts enum [\#1739](https://github.com/pypeclub/OpenPype/pull/1739) +- Validate containers settings [\#1736](https://github.com/pypeclub/OpenPype/pull/1736) +- PS - added loader from sequence [\#1726](https://github.com/pypeclub/OpenPype/pull/1726) +- Autoupdate launcher [\#1725](https://github.com/pypeclub/OpenPype/pull/1725) +- Toggle Ftrack upload in StandalonePublisher [\#1708](https://github.com/pypeclub/OpenPype/pull/1708) +- Nuke: Prerender Frame Range by default [\#1699](https://github.com/pypeclub/OpenPype/pull/1699) +- Smoother edges of color triangle [\#1695](https://github.com/pypeclub/OpenPype/pull/1695) + +**🐛 Bug fixes** + +- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) +- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) +- Invitee email can be None which break the Ftrack commit. [\#1788](https://github.com/pypeclub/OpenPype/pull/1788) +- Otio unrelated error on import [\#1782](https://github.com/pypeclub/OpenPype/pull/1782) +- FFprobe streams order [\#1775](https://github.com/pypeclub/OpenPype/pull/1775) +- Fix - single file files are str only, cast it to list to count properly [\#1772](https://github.com/pypeclub/OpenPype/pull/1772) +- Environments in app executable for MacOS [\#1768](https://github.com/pypeclub/OpenPype/pull/1768) +- Project specific environments [\#1767](https://github.com/pypeclub/OpenPype/pull/1767) +- Settings UI with refresh button [\#1764](https://github.com/pypeclub/OpenPype/pull/1764) +- Standalone publisher thumbnail extractor fix [\#1761](https://github.com/pypeclub/OpenPype/pull/1761) +- Anatomy others templates don't cause crash [\#1758](https://github.com/pypeclub/OpenPype/pull/1758) +- Backend acre module commit update [\#1745](https://github.com/pypeclub/OpenPype/pull/1745) +- hiero: precollect instances failing when audio selected [\#1743](https://github.com/pypeclub/OpenPype/pull/1743) +- Hiero: creator instance error [\#1742](https://github.com/pypeclub/OpenPype/pull/1742) +- Nuke: fixing render creator for no selection format failing [\#1741](https://github.com/pypeclub/OpenPype/pull/1741) +- StandalonePublisher: failing collector for editorial [\#1738](https://github.com/pypeclub/OpenPype/pull/1738) +- Local settings UI crash on missing defaults [\#1737](https://github.com/pypeclub/OpenPype/pull/1737) +- TVPaint white background on thumbnail [\#1735](https://github.com/pypeclub/OpenPype/pull/1735) +- Ftrack missing custom attribute message [\#1734](https://github.com/pypeclub/OpenPype/pull/1734) +- Launcher project changes [\#1733](https://github.com/pypeclub/OpenPype/pull/1733) +- Ftrack sync status [\#1732](https://github.com/pypeclub/OpenPype/pull/1732) +- TVPaint use layer name for default variant [\#1724](https://github.com/pypeclub/OpenPype/pull/1724) +- Default subset template for TVPaint review and workfile families [\#1716](https://github.com/pypeclub/OpenPype/pull/1716) +- Maya: Extract review hotfix [\#1714](https://github.com/pypeclub/OpenPype/pull/1714) +- Settings: Imageio improving granularity [\#1711](https://github.com/pypeclub/OpenPype/pull/1711) +- Application without executables [\#1679](https://github.com/pypeclub/OpenPype/pull/1679) +- Unreal: launching on Linux [\#1672](https://github.com/pypeclub/OpenPype/pull/1672) + +**Merged pull 
requests:** + +- Bump prismjs from 1.23.0 to 1.24.0 in /website [\#1773](https://github.com/pypeclub/OpenPype/pull/1773) +- TVPaint ftrack family [\#1755](https://github.com/pypeclub/OpenPype/pull/1755) + +## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4) + +## [2.18.3](https://github.com/pypeclub/OpenPype/tree/2.18.3) (2021-06-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.2...2.18.3) + +## [2.18.2](https://github.com/pypeclub/OpenPype/tree/2.18.2) (2021-06-16) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.1.0...2.18.2) + +## [3.1.0](https://github.com/pypeclub/OpenPype/tree/3.1.0) (2021-06-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.0.0...3.1.0) + +### 📖 Documentation + +- Feature Slack integration [\#1657](https://github.com/pypeclub/OpenPype/pull/1657) + +**🚀 Enhancements** + +- Log Viewer with OpenPype style [\#1703](https://github.com/pypeclub/OpenPype/pull/1703) +- Scrolling in OpenPype info widget [\#1702](https://github.com/pypeclub/OpenPype/pull/1702) +- OpenPype style in modules [\#1694](https://github.com/pypeclub/OpenPype/pull/1694) +- Sort applications and tools alphabetically in Settings UI [\#1689](https://github.com/pypeclub/OpenPype/pull/1689) +- \#683 - Validate Frame Range in Standalone Publisher [\#1683](https://github.com/pypeclub/OpenPype/pull/1683) +- Hiero: old container versions identify with red color [\#1682](https://github.com/pypeclub/OpenPype/pull/1682) +- Project Manger: Default name column width [\#1669](https://github.com/pypeclub/OpenPype/pull/1669) +- Remove outline in stylesheet [\#1667](https://github.com/pypeclub/OpenPype/pull/1667) +- TVPaint: Creator take layer name as default value for subset variant [\#1663](https://github.com/pypeclub/OpenPype/pull/1663) +- TVPaint custom subset template [\#1662](https://github.com/pypeclub/OpenPype/pull/1662) +- Editorial: conform assets validator [\#1659](https://github.com/pypeclub/OpenPype/pull/1659) +- Nuke - Publish simplification [\#1653](https://github.com/pypeclub/OpenPype/pull/1653) +- \#1333 - added tooltip hints to Pyblish buttons [\#1649](https://github.com/pypeclub/OpenPype/pull/1649) + +**🐛 Bug fixes** + +- Nuke: broken publishing rendered frames [\#1707](https://github.com/pypeclub/OpenPype/pull/1707) +- Standalone publisher Thumbnail export args [\#1705](https://github.com/pypeclub/OpenPype/pull/1705) +- Bad zip can break OpenPype start [\#1691](https://github.com/pypeclub/OpenPype/pull/1691) +- Hiero: published whole edit mov [\#1687](https://github.com/pypeclub/OpenPype/pull/1687) +- Ftrack subprocess handle of stdout/stderr [\#1675](https://github.com/pypeclub/OpenPype/pull/1675) +- Settings list race condifiton and mutable dict list conversion [\#1671](https://github.com/pypeclub/OpenPype/pull/1671) +- Mac launch arguments fix [\#1660](https://github.com/pypeclub/OpenPype/pull/1660) +- Fix missing dbm python module [\#1652](https://github.com/pypeclub/OpenPype/pull/1652) +- Transparent branches in view on Mac [\#1648](https://github.com/pypeclub/OpenPype/pull/1648) +- Add asset on task item [\#1646](https://github.com/pypeclub/OpenPype/pull/1646) +- Project manager save and queue [\#1645](https://github.com/pypeclub/OpenPype/pull/1645) +- New project anatomy values [\#1644](https://github.com/pypeclub/OpenPype/pull/1644) +- Farm publishing: check if published items do exist 
[\#1573](https://github.com/pypeclub/OpenPype/pull/1573)
+
+**Merged pull requests:**
+
+- Bump normalize-url from 4.5.0 to 4.5.1 in /website [\#1686](https://github.com/pypeclub/OpenPype/pull/1686)
+
## [3.0.0](https://github.com/pypeclub/openpype/tree/3.0.0)
@@ -11,12 +1817,12 @@
- Easy to add Application versions.
- Per Project Environment and plugin management.
- Robust profile system for creating reviewables and burnins, with filtering based on Application, Task and data family.
-- Configurable publish plugins. 
+- Configurable publish plugins.
- Options to make any validator or extractor, optional or disabled.
- Color Management is now unified under anatomy settings.
- Subset naming and grouping is fully configurable.
- All project attributes can now be set directly in OpenPype settings.
-- Studio Setting can be locked to prevent unwanted artist changes. 
+- Studio Setting can be locked to prevent unwanted artist changes.
- You can now add per project and per task type templates for workfile initialization in most hosts.
- Too many other individual configurable option to list in this changelog :)
@@ -774,8 +2580,6 @@
- Standalone Publisher: getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729)
-# Changelog
-
## [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) (2020-11-15)
[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6)
@@ -1565,10 +3369,4 @@ A large cleanup release. Most of the change are under the hood.
- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner
-\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)*
-
-
-\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)*
-
-
\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)*
diff --git a/README.md b/README.md
index b8c04f8b49..a3d3cf1dbb 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,7 @@ It can be built and ran on all common platforms. We develop and test on the foll
- **Linux**
  - **Ubuntu** 20.04 LTS
  - **Centos** 7
-- **Mac OSX** 
+- **Mac OSX**
  - **10.15** Catalina
  - **11.1** Big Sur (using Rosetta2)
@@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`.
**Note that it needs existing virtual environment.**
+
+Developer tools
+-------------
+
+If you wish to add your own tools to the `.\tools` folder without git tracking them, you can do so by naming them with the `dev_` prefix (example: `dev_clear_pyc(.ps1|.sh)`).
+
+
+
## Contributors ✨
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
diff --git a/common/openpype_common/distribution/README.md b/common/openpype_common/distribution/README.md
new file mode 100644
index 0000000000..212eb267b8
--- /dev/null
+++ b/common/openpype_common/distribution/README.md
@@ -0,0 +1,18 @@
+Addon distribution tool
+------------------------
+
+Code in this folder is the backend portion of the Addon distribution logic for the v4 server.
+
+Each host and module will be a separate Addon in the future. Each v4 server could run a different set of Addons.
+
+The client (running on an artist machine) first asks the v4 server for a list of enabled addons.
+(It expects a list of json documents matching the `addon_distribution.py:AddonInfo` object.)
+Next it compares the presence of each enabled addon version in the local folder. When a version of
+an addon is missing, the client uses the information in the addon to download (from http/shared local disk/git) a zip file
+and unzip it.
+
+A required part of addon distribution will be the sharing of dependencies (python libraries, utilities), which is not part of this folder.
+
+The location of this folder might change in the future, as a client will need to add this folder to sys.path reliably.
+
+This code needs to be as independent of OpenPype code as possible!
\ No newline at end of file
diff --git a/openpype/hosts/testhost/__init__.py b/common/openpype_common/distribution/__init__.py
similarity index 100%
rename from openpype/hosts/testhost/__init__.py
rename to common/openpype_common/distribution/__init__.py
diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py
new file mode 100644
index 0000000000..5e48639dec
--- /dev/null
+++ b/common/openpype_common/distribution/addon_distribution.py
@@ -0,0 +1,208 @@
+import os
+from enum import Enum
+from abc import abstractmethod
+import attr
+import logging
+import requests
+import platform
+import shutil
+
+from .file_handler import RemoteFileHandler
+from .addon_info import AddonInfo
+
+
+class UpdateState(Enum):
+    EXISTS = "exists"
+    UPDATED = "updated"
+    FAILED = "failed"
+
+
+class AddonDownloader:
+    log = logging.getLogger(__name__)
+
+    def __init__(self):
+        self._downloaders = {}
+
+    def register_format(self, downloader_type, downloader):
+        self._downloaders[downloader_type.value] = downloader
+
+    def get_downloader(self, downloader_type):
+        downloader = self._downloaders.get(downloader_type)
+        if not downloader:
+            raise ValueError(f"{downloader_type} not implemented")
+        return downloader()
+
+    @classmethod
+    @abstractmethod
+    def download(cls, source, destination):
+        """Returns local path to downloaded addon zip file.
+
+        Args:
+            source (dict): source definition (eg. {"type": "http", "url": "https://..."})
+            destination (str): local folder to unzip into
+        Returns:
+            (str) local path to addon zip file
+        """
+        pass
+
+    @classmethod
+    def check_hash(cls, addon_path, addon_hash):
+        """Compares 'addon_hash' with the hash of the downloaded file at 'addon_path'.
+
+        Args:
+            addon_path (str): local path to addon zip file
+            addon_hash (str): sha256 hash of zip file
+        Raises:
+            ValueError if hashes don't match
+        """
+        if not os.path.exists(addon_path):
+            raise ValueError(f"{addon_path} doesn't exist.")
+        if not RemoteFileHandler.check_integrity(addon_path,
+                                                 addon_hash,
+                                                 hash_type="sha256"):
+            raise ValueError(f"{addon_path} doesn't match expected hash.")
+
+    @classmethod
+    def unzip(cls, addon_zip_path, destination):
+        """Unzips local 'addon_zip_path' to 'destination'.
+
+        Args:
+            addon_zip_path (str): local path to addon zip file
+            destination (str): local folder to unzip into
+        """
+        RemoteFileHandler.unzip(addon_zip_path, destination)
+        os.remove(addon_zip_path)
+
+    @classmethod
+    def remove(cls, addon_url):
+        pass
+
+
+class OSAddonDownloader(AddonDownloader):
+
+    @classmethod
+    def download(cls, source, destination):
+        # OS path doesn't need to be downloaded, unzip directly
+        addon_url = source["path"].get(platform.system().lower())
+        if not os.path.exists(addon_url):
+            raise ValueError("{} is not accessible".format(addon_url))
+        return addon_url
+
+
+class HTTPAddonDownloader(AddonDownloader):
+    CHUNK_SIZE = 100000
+
+    @classmethod
+    def download(cls, source, destination):
+        source_url = source["url"]
+        cls.log.debug(f"Downloading {source_url} to {destination}")
+        file_name = os.path.basename(destination)
+        _, ext = os.path.splitext(file_name)
+        if (ext.replace(".", '') not
+                in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)):
+            file_name += ".zip"
+        RemoteFileHandler.download_url(source_url,
+                                       destination,
+                                       filename=file_name)
+
+        return os.path.join(destination, file_name)
+
+
+def get_addons_info(server_endpoint):
+    """Returns list of addon information from Server"""
+    # TODO temp
+    # addon_info = AddonInfo(
+    #     **{"name": "openpype_slack",
+    #        "version": "1.0.0",
+    #        "addon_url": "c:/projects/openpype_slack_1.0.0.zip",
+    #        "type": UrlType.FILESYSTEM,
+    #        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"})  # noqa
+    #
+    # http_addon = AddonInfo(
+    #     **{"name": "openpype_slack",
+    #        "version": "1.0.0",
+    #        "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing",  # noqa
+    #        "type": UrlType.HTTP,
+    #        "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"})  # noqa
+
+    response = requests.get(server_endpoint)
+    if not response.ok:
+        raise Exception(response.text)
+
+    addons_info = []
+    for addon in response.json():
+        addons_info.append(AddonInfo(**addon))
+    return addons_info
+
+
+def update_addon_state(addon_infos, destination_folder, factory,
+                       log=None):
+    """Loops through all 'addon_infos', compares local versions, unzips.
+
+    Loops through the server provided list of dictionaries with information
+    about available addons. Checks if each addon is already present and
+    deployed. If it isn't, the addon zip gets downloaded and unzipped into
+    'destination_folder'.
+    Args:
+        addon_infos (list of AddonInfo)
+        destination_folder (str): local path
+        factory (AddonDownloader): factory to get appropriate downloader per
+            addon type
+        log (logging.Logger)
+    Returns:
+        (dict): {"addon_full_name": UpdateState.value
"exists"|"updated"|"failed") + """ + if not log: + log = logging.getLogger(__name__) + + download_states = {} + for addon in addon_infos: + full_name = "{}_{}".format(addon.name, addon.version) + addon_dest = os.path.join(destination_folder, full_name) + + if os.path.isdir(addon_dest): + log.debug(f"Addon version folder {addon_dest} already exists.") + download_states[full_name] = UpdateState.EXISTS.value + continue + + for source in addon.sources: + download_states[full_name] = UpdateState.FAILED.value + try: + downloader = factory.get_downloader(source.type) + zip_file_path = downloader.download(attr.asdict(source), + addon_dest) + downloader.check_hash(zip_file_path, addon.hash) + downloader.unzip(zip_file_path, addon_dest) + download_states[full_name] = UpdateState.UPDATED.value + break + except Exception: + log.warning(f"Error happened during updating {addon.name}", + exc_info=True) + if os.path.isdir(addon_dest): + log.debug(f"Cleaning {addon_dest}") + shutil.rmtree(addon_dest) + + return download_states + + +def check_addons(server_endpoint, addon_folder, downloaders): + """Main entry point to compare existing addons with those on server. + + Args: + server_endpoint (str): url to v4 server endpoint + addon_folder (str): local dir path for addons + downloaders (AddonDownloader): factory of downloaders + + Raises: + (RuntimeError) if any addon failed update + """ + addons_info = get_addons_info(server_endpoint) + result = update_addon_state(addons_info, + addon_folder, + downloaders) + if UpdateState.FAILED.value in result.values(): + raise RuntimeError(f"Unable to update some addons {result}") + + +def cli(*args): + raise NotImplementedError diff --git a/common/openpype_common/distribution/addon_info.py b/common/openpype_common/distribution/addon_info.py new file mode 100644 index 0000000000..00ece11f3b --- /dev/null +++ b/common/openpype_common/distribution/addon_info.py @@ -0,0 +1,80 @@ +import attr +from enum import Enum + + +class UrlType(Enum): + HTTP = "http" + GIT = "git" + FILESYSTEM = "filesystem" + + +@attr.s +class MultiPlatformPath(object): + windows = attr.ib(default=None) + linux = attr.ib(default=None) + darwin = attr.ib(default=None) + + +@attr.s +class AddonSource(object): + type = attr.ib() + + +@attr.s +class LocalAddonSource(AddonSource): + path = attr.ib(default=attr.Factory(MultiPlatformPath)) + + +@attr.s +class WebAddonSource(AddonSource): + url = attr.ib(default=None) + + +@attr.s +class VersionData(object): + version_data = attr.ib(default=None) + + +@attr.s +class AddonInfo(object): + """Object matching json payload from Server""" + name = attr.ib() + version = attr.ib() + title = attr.ib(default=None) + sources = attr.ib(default=attr.Factory(dict)) + hash = attr.ib(default=None) + description = attr.ib(default=None) + license = attr.ib(default=None) + authors = attr.ib(default=None) + + @classmethod + def from_dict(cls, data): + sources = [] + + production_version = data.get("productionVersion") + if not production_version: + return + + # server payload contains info about all versions + # active addon must have 'productionVersion' and matching version info + version_data = data.get("versions", {})[production_version] + + for source in version_data.get("clientSourceInfo", []): + if source.get("type") == UrlType.FILESYSTEM.value: + source_addon = LocalAddonSource(type=source["type"], + path=source["path"]) + if source.get("type") == UrlType.HTTP.value: + source_addon = WebAddonSource(type=source["type"], + url=source["url"]) + + sources.append(source_addon) 
diff --git a/tests/lib/file_handler.py b/common/openpype_common/distribution/file_handler.py
similarity index 83%
rename from tests/lib/file_handler.py
rename to common/openpype_common/distribution/file_handler.py
index ee3abc6ecb..f585c77632 100644
--- a/tests/lib/file_handler.py
+++ b/common/openpype_common/distribution/file_handler.py
@@ -21,7 +21,7 @@ class RemoteFileHandler:
    'tar.gz', 'tar.xz', 'tar.bz2']

    @staticmethod
-    def calculate_md5(fpath, chunk_size):
+    def calculate_md5(fpath, chunk_size=10000):
        md5 = hashlib.md5()
        with open(fpath, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
@@ -33,17 +33,45 @@ class RemoteFileHandler:
        return md5 == RemoteFileHandler.calculate_md5(fpath, **kwargs)

    @staticmethod
-    def check_integrity(fpath, md5=None):
+    def calculate_sha256(fpath):
+        """Calculate sha256 for content of the file.
+
+        Args:
+            fpath (str): Path to file.
+
+        Returns:
+            str: hex encoded sha256
+
+        """
+        h = hashlib.sha256()
+        b = bytearray(128 * 1024)
+        mv = memoryview(b)
+        with open(fpath, 'rb', buffering=0) as f:
+            for n in iter(lambda: f.readinto(mv), 0):
+                h.update(mv[:n])
+        return h.hexdigest()
+
+    @staticmethod
+    def check_sha256(fpath, sha256, **kwargs):
+        return sha256 == RemoteFileHandler.calculate_sha256(fpath, **kwargs)
+
+    @staticmethod
+    def check_integrity(fpath, hash_value=None, hash_type=None):
        if not os.path.isfile(fpath):
            return False
-        if md5 is None:
+        if hash_value is None:
            return True
-        return RemoteFileHandler.check_md5(fpath, md5)
+        if not hash_type:
+            raise ValueError("Provide hash type, md5 or sha256")
+        if hash_type == 'md5':
+            return RemoteFileHandler.check_md5(fpath, hash_value)
+        if hash_type == "sha256":
+            return RemoteFileHandler.check_sha256(fpath, hash_value)

    @staticmethod
    def download_url(
        url, root, filename=None,
-        md5=None, max_redirect_hops=3
+        sha256=None, max_redirect_hops=3
    ):
        """Download a file from a url and place it in root.
        Args:
            url (str): URL to download file from
            root (str): Directory to place downloaded file in
            filename (str, optional): Name to save the file under.
                If None, use the basename of the URL
-            md5 (str, optional): MD5 checksum of the download.
+            sha256 (str, optional): sha256 checksum of the download.
                If None, do not check
            max_redirect_hops (int, optional): Maximum number of redirect
                hops allowed
@@ -64,7 +92,8 @@
        os.makedirs(root, exist_ok=True)

        # check if file is already present locally
-        if RemoteFileHandler.check_integrity(fpath, md5):
+        if RemoteFileHandler.check_integrity(fpath,
+                                             sha256, hash_type="sha256"):
            print('Using downloaded and verified file: ' + fpath)
            return
@@ -76,7 +105,7 @@
        file_id = RemoteFileHandler._get_google_drive_file_id(url)
        if file_id is not None:
            return RemoteFileHandler.download_file_from_google_drive(
-                file_id, root, filename, md5)
+                file_id, root, filename, sha256)

        # download the file
        try:
@@ -92,20 +121,21 @@
            raise e

        # check integrity of downloaded file
-        if not RemoteFileHandler.check_integrity(fpath, md5):
+        if not RemoteFileHandler.check_integrity(fpath,
+                                                 sha256, hash_type="sha256"):
            raise RuntimeError("File not found or corrupted.")

    @staticmethod
    def download_file_from_google_drive(file_id, root, filename=None,
-                                        md5=None):
+                                        sha256=None):
        """Download a file from Google Drive and place it in root.
        Args:
            file_id (str): id of file to be downloaded
            root (str): Directory to place downloaded file in
            filename (str, optional): Name to save the file under.
                If None, use the id of the file.
-            md5 (str, optional): MD5 checksum of the download.
+            sha256 (str, optional): sha256 checksum of the download.
                If None, do not check
        """
        # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url  # noqa
@@ -119,8 +149,8 @@
        os.makedirs(root, exist_ok=True)

-        if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(fpath,
-                                                                       md5):
+        if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(
+                fpath, sha256, hash_type="sha256"):
            print('Using downloaded and verified file: ' + fpath)
        else:
            session = requests.Session()
diff --git a/common/openpype_common/distribution/tests/test_addon_distributtion.py b/common/openpype_common/distribution/tests/test_addon_distributtion.py
new file mode 100644
index 0000000000..765ea0596a
--- /dev/null
+++ b/common/openpype_common/distribution/tests/test_addon_distributtion.py
@@ -0,0 +1,167 @@
+import pytest
+import attr
+import tempfile
+
+from common.openpype_common.distribution.addon_distribution import (
+    AddonDownloader,
+    OSAddonDownloader,
+    HTTPAddonDownloader,
+    AddonInfo,
+    update_addon_state,
+    UpdateState
+)
+from common.openpype_common.distribution.addon_info import UrlType
+
+
+@pytest.fixture
+def addon_downloader():
+    addon_downloader = AddonDownloader()
+    addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
+    addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader)
+
+    yield addon_downloader
+
+
+@pytest.fixture
+def http_downloader(addon_downloader):
+    yield addon_downloader.get_downloader(UrlType.HTTP.value)
+
+
+@pytest.fixture
+def temp_folder():
+    yield tempfile.mkdtemp()
+
+
+@pytest.fixture
+def sample_addon_info():
+    addon_info = {
+        "versions": {
+            "1.0.0": {
+                "clientPyproject": {
+                    "tool": {
+                        "poetry": {
+                            "dependencies": {
+                                "nxtools": "^1.6",
+                                "orjson": "^3.6.7",
+                                "typer": "^0.4.1",
+                                "email-validator": "^1.1.3",
+                                "python": "^3.10",
+                                "fastapi": "^0.73.0"
+                            }
+                        }
+                    }
+                },
+                "hasSettings": True,
+                "clientSourceInfo": [
+                    {
+                        "type": "http",
+                        "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing"  # noqa
+                    },
+                    {
+                        "type": "filesystem",
+                        "path": {
+                            "windows": ["P:/sources/some_file.zip",
"W:/sources/some_file.zip"], # noqa + "linux": ["/mnt/srv/sources/some_file.zip"], + "darwin": ["/Volumes/srv/sources/some_file.zip"] + } + } + ], + "frontendScopes": { + "project": { + "sidebar": "hierarchy" + } + } + } + }, + "description": "", + "title": "Slack addon", + "name": "openpype_slack", + "productionVersion": "1.0.0", + "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa + } + yield addon_info + + +def test_register(printer): + addon_downloader = AddonDownloader() + + assert len(addon_downloader._downloaders) == 0, "Contains registered" + + addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader) + assert len(addon_downloader._downloaders) == 1, "Should contain one" + + +def test_get_downloader(printer, addon_downloader): + assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa + + with pytest.raises(ValueError): + addon_downloader.get_downloader("unknown"), "Shouldn't find" + + +def test_addon_info(printer, sample_addon_info): + """Tests parsing of expected payload from v4 server into AadonInfo.""" + valid_minimum = { + "name": "openpype_slack", + "productionVersion": "1.0.0", + "versions": { + "1.0.0": { + "clientSourceInfo": [ + { + "type": "filesystem", + "path": { + "windows": [ + "P:/sources/some_file.zip", + "W:/sources/some_file.zip"], + "linux": [ + "/mnt/srv/sources/some_file.zip"], + "darwin": [ + "/Volumes/srv/sources/some_file.zip"] # noqa + } + } + ] + } + } + } + + assert AddonInfo.from_dict(valid_minimum), "Missing required fields" + + valid_minimum["versions"].pop("1.0.0") + with pytest.raises(KeyError): + assert not AddonInfo.from_dict(valid_minimum), "Must fail without version data" # noqa + + valid_minimum.pop("productionVersion") + assert not AddonInfo.from_dict( + valid_minimum), "none if not productionVersion" # noqa + + addon = AddonInfo.from_dict(sample_addon_info) + assert addon, "Should be created" + assert addon.name == "openpype_slack", "Incorrect name" + assert addon.version == "1.0.0", "Incorrect version" + + with pytest.raises(TypeError): + assert addon["name"], "Dict approach not implemented" + + addon_as_dict = attr.asdict(addon) + assert addon_as_dict["name"], "Dict approach should work" + + +def test_update_addon_state(printer, sample_addon_info, + temp_folder, addon_downloader): + """Tests possible cases of addon update.""" + addon_info = AddonInfo.from_dict(sample_addon_info) + orig_hash = addon_info.hash + + addon_info.hash = "brokenhash" + result = update_addon_state([addon_info], temp_folder, addon_downloader) + assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \ + "Update should failed because of wrong hash" + + addon_info.hash = orig_hash + result = update_addon_state([addon_info], temp_folder, addon_downloader) + assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \ + "Addon should have been updated" + + result = update_addon_state([addon_info], temp_folder, addon_downloader) + assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \ + "Addon should already exist" diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index c5003b062e..ccc9d4ac52 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -63,7 +63,7 @@ class OpenPypeVersion(semver.VersionInfo): """ staging = False path = None - _VERSION_REGEX = 
re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") # noqa: E501 + _VERSION_REGEX = re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?") # noqa: E501 _installed_version = None def __init__(self, *args, **kwargs): diff --git a/igniter/install_dialog.py b/igniter/install_dialog.py index b09529f5c5..65ddd58735 100644 --- a/igniter/install_dialog.py +++ b/igniter/install_dialog.py @@ -388,8 +388,11 @@ class InstallDialog(QtWidgets.QDialog): install_thread.start() def _installation_finished(self): + # TODO we should find out why status can be set to 'None'? + # - 'InstallThread.run' should handle all cases so not sure where + # that come from status = self._install_thread.result() - if status >= 0: + if status is not None and status >= 0: self._update_progress(100) QtWidgets.QApplication.processEvents() self.done(3) diff --git a/openpype/action.py b/openpype/action.py index 50741875e4..de9cdee010 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -1,44 +1,82 @@ -# absolute_import is needed to counter the `module has no cmds error` in Maya -from __future__ import absolute_import - +import warnings +import functools import pyblish.api -def get_errored_instances_from_context(context): - - instances = list() - for result in context.data["results"]: - if result["instance"] is None: - # When instance is None we are on the "context" result - continue - - if result["error"]: - instances.append(result["instance"]) - - return instances +class ActionDeprecatedWarning(DeprecationWarning): + pass -def get_errored_plugins_from_data(context): - """Get all failed validation plugins - - Args: - context (object): - - Returns: - list of plugins which failed during validation +def deprecated(new_destination): + """Mark functions as deprecated. + It will result in a warning being emitted when the function is used. """ - plugins = list() - results = context.data.get("results", []) - for result in results: - if result["success"] is True: - continue - plugins.append(result["plugin"]) + func = None + if callable(new_destination): + func = new_destination + new_destination = None - return plugins + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", ActionDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=ActionDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) +@deprecated("openpype.pipeline.publish.get_errored_instances_from_context") +def get_errored_instances_from_context(context): + """ + Deprecated: + Since 3.14.* will be removed in 3.16.* or later. 
+ """ + + from openpype.pipeline.publish import get_errored_instances_from_context + + return get_errored_instances_from_context(context) + + +@deprecated("openpype.pipeline.publish.get_errored_plugins_from_context") +def get_errored_plugins_from_data(context): + """ + Deprecated: + Since 3.14.* will be removed in 3.16.* or later. + """ + + from openpype.pipeline.publish import get_errored_plugins_from_context + + return get_errored_plugins_from_context(context) + + +# 'RepairAction' and 'RepairContextAction' were moved to +# 'openpype.pipeline.publish' please change you imports. +# There is no "reasonable" way hot mark these classes as deprecated to show +# warning of wrong import. +# Deprecated since 3.14.* will be removed in 3.16.* class RepairAction(pyblish.api.Action): """Repairs the action @@ -65,6 +103,7 @@ class RepairAction(pyblish.api.Action): plugin.repair(instance) +# Deprecated since 3.14.* will be removed in 3.16.* class RepairContextAction(pyblish.api.Action): """Repairs the action diff --git a/openpype/api.py b/openpype/api.py index c2227c1a52..b60cd21d2b 100644 --- a/openpype/api.py +++ b/openpype/api.py @@ -11,7 +11,6 @@ from .lib import ( PypeLogger, Logger, Anatomy, - config, execute, run_subprocess, version_up, @@ -49,7 +48,6 @@ from .plugin import ( ValidateContentsOrder, ValidateSceneOrder, ValidateMeshOrder, - ValidationException ) # temporary fix, might @@ -73,7 +71,6 @@ __all__ = [ "PypeLogger", "Logger", "Anatomy", - "config", "execute", "get_default_components", "ApplicationManager", @@ -94,8 +91,6 @@ __all__ = [ "RepairAction", "RepairContextAction", - "ValidationException", - # get contextual data "version_up", "get_asset", diff --git a/openpype/cli.py b/openpype/cli.py index 398d1a94c0..d24cd4a872 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -277,6 +277,13 @@ def projectmanager(): PypeCommands().launch_project_manager() +@main.command(context_settings={"ignore_unknown_options": True}) +def publish_report_viewer(): + from openpype.tools.publisher.publish_report_viewer import main + + sys.exit(main()) + + @main.command() @click.argument("output_path") @click.option("--project", help="Define project context") diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index 64a82334d9..7831afd8ad 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -45,6 +45,17 @@ from .entities import ( get_workfile_info, ) +from .entity_links import ( + get_linked_asset_ids, + get_linked_assets, + get_linked_representation_id, +) + +from .operations import ( + create_project, +) + + __all__ = ( "OpenPypeMongoConnection", @@ -88,4 +99,10 @@ __all__ = ( "get_thumbnail_id_from_source", "get_workfile_info", + + "get_linked_asset_ids", + "get_linked_assets", + "get_linked_representation_id", + + "create_project", ) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index f1f1d30214..43afccf2f1 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -14,6 +14,8 @@ from bson.objectid import ObjectId from .mongo import get_project_database, get_project_connection +PatternType = type(re.compile("")) + def _prepare_fields(fields, required_fields=None): if not fields: @@ -32,17 +34,37 @@ def _prepare_fields(fields, required_fields=None): return output -def _convert_id(in_id): +def convert_id(in_id): + """Helper function for conversion of id from string to ObjectId. + + Args: + in_id (Union[str, ObjectId, Any]): Entity id that should be converted + to right type for queries. 
+ + Returns: + Union[ObjectId, Any]: Converted ids to ObjectId or in type. + """ + if isinstance(in_id, six.string_types): return ObjectId(in_id) return in_id -def _convert_ids(in_ids): +def convert_ids(in_ids): + """Helper function for conversion of ids from string to ObjectId. + + Args: + in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that + should be converted to right type for queries. + + Returns: + List[ObjectId]: Converted ids to ObjectId. + """ + _output = set() for in_id in in_ids: if in_id is not None: - _output.add(_convert_id(in_id)) + _output.add(convert_id(in_id)) return list(_output) @@ -58,7 +80,7 @@ def get_projects(active=True, inactive=False, fields=None): yield project_doc -def get_project(project_name, active=True, inactive=False, fields=None): +def get_project(project_name, active=True, inactive=True, fields=None): # Skip if both are disabled if not active and not inactive: return None @@ -115,7 +137,7 @@ def get_asset_by_id(project_name, asset_id, fields=None): None: Asset was not found by id. """ - asset_id = _convert_id(asset_id) + asset_id = convert_id(asset_id) if not asset_id: return None @@ -196,7 +218,7 @@ def _get_assets( query_filter = {"type": {"$in": asset_types}} if asset_ids is not None: - asset_ids = _convert_ids(asset_ids) + asset_ids = convert_ids(asset_ids) if not asset_ids: return [] query_filter["_id"] = {"$in": asset_ids} @@ -207,7 +229,7 @@ def _get_assets( query_filter["name"] = {"$in": list(asset_names)} if parent_ids is not None: - parent_ids = _convert_ids(parent_ids) + parent_ids = convert_ids(parent_ids) if not parent_ids: return [] query_filter["data.visualParent"] = {"$in": parent_ids} @@ -307,7 +329,7 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None): "type": "subset" } if asset_ids is not None: - asset_ids = _convert_ids(asset_ids) + asset_ids = convert_ids(asset_ids) if not asset_ids: return [] subset_query["parent"] = {"$in": asset_ids} @@ -347,7 +369,7 @@ def get_subset_by_id(project_name, subset_id, fields=None): Dict: Subset document which can be reduced to specified 'fields'. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -374,7 +396,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): if not subset_name: return None - asset_id = _convert_id(asset_id) + asset_id = convert_id(asset_id) if not asset_id: return None @@ -428,13 +450,13 @@ def get_subsets( query_filter = {"type": {"$in": subset_types}} if asset_ids is not None: - asset_ids = _convert_ids(asset_ids) + asset_ids = convert_ids(asset_ids) if not asset_ids: return [] query_filter["parent"] = {"$in": asset_ids} if subset_ids is not None: - subset_ids = _convert_ids(subset_ids) + subset_ids = convert_ids(subset_ids) if not subset_ids: return [] query_filter["_id"] = {"$in": subset_ids} @@ -449,7 +471,7 @@ def get_subsets( for asset_id, names in names_by_asset_ids.items(): if asset_id and names: or_query.append({ - "parent": _convert_id(asset_id), + "parent": convert_id(asset_id), "name": {"$in": list(names)} }) if not or_query: @@ -510,7 +532,7 @@ def get_version_by_id(project_name, version_id, fields=None): Dict: Version document which can be reduced to specified 'fields'. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return None @@ -537,7 +559,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None): Dict: Version document which can be reduced to specified 'fields'. 
""" - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -567,7 +589,7 @@ def version_is_latest(project_name, version_id): bool: True if is latest version from subset else False. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return False version_doc = get_version_by_id( @@ -610,13 +632,13 @@ def _get_versions( query_filter = {"type": {"$in": version_types}} if subset_ids is not None: - subset_ids = _convert_ids(subset_ids) + subset_ids = convert_ids(subset_ids) if not subset_ids: return [] query_filter["parent"] = {"$in": subset_ids} if version_ids is not None: - version_ids = _convert_ids(version_ids) + version_ids = convert_ids(version_ids) if not version_ids: return [] query_filter["_id"] = {"$in": version_ids} @@ -690,7 +712,7 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None): Dict: Hero version entity data. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -720,7 +742,7 @@ def get_hero_version_by_id(project_name, version_id, fields=None): Dict: Hero version entity data. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return None @@ -786,7 +808,7 @@ def get_output_link_versions(project_name, version_id, fields=None): links for passed version. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return [] @@ -812,7 +834,7 @@ def get_last_versions(project_name, subset_ids, fields=None): dict[ObjectId, int]: Key is subset id and value is last version name. """ - subset_ids = _convert_ids(subset_ids) + subset_ids = convert_ids(subset_ids) if not subset_ids: return {} @@ -898,7 +920,7 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None): Dict: Version document which can be reduced to specified 'fields'. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -971,7 +993,7 @@ def get_representation_by_id(project_name, representation_id, fields=None): "type": {"$in": repre_types} } if representation_id is not None: - query_filter["_id"] = _convert_id(representation_id) + query_filter["_id"] = convert_id(representation_id) conn = get_project_connection(project_name) @@ -996,7 +1018,7 @@ def get_representation_by_name( to specified 'fields'. 
""" - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id or not representation_name: return None repre_types = ["representation", "archived_representations"] @@ -1034,11 +1056,11 @@ def _regex_filters(filters): for key, value in filters.items(): regexes = [] a_values = [] - if isinstance(value, re.Pattern): + if isinstance(value, PatternType): regexes.append(value) elif isinstance(value, (list, tuple, set)): for item in value: - if isinstance(item, re.Pattern): + if isinstance(item, PatternType): regexes.append(item) else: a_values.append(item) @@ -1089,7 +1111,7 @@ def _get_representations( query_filter = {"type": {"$in": repre_types}} if representation_ids is not None: - representation_ids = _convert_ids(representation_ids) + representation_ids = convert_ids(representation_ids) if not representation_ids: return default_output query_filter["_id"] = {"$in": representation_ids} @@ -1100,7 +1122,7 @@ def _get_representations( query_filter["name"] = {"$in": list(representation_names)} if version_ids is not None: - version_ids = _convert_ids(version_ids) + version_ids = convert_ids(version_ids) if not version_ids: return default_output query_filter["parent"] = {"$in": version_ids} @@ -1111,7 +1133,7 @@ def _get_representations( for version_id, names in names_by_version_ids.items(): if version_id and names: or_query.append({ - "parent": _convert_id(version_id), + "parent": convert_id(version_id), "name": {"$in": list(names)} }) if not or_query: @@ -1174,7 +1196,7 @@ def get_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - context_filters (Dict[str, List[str, re.Pattern]]): Filter by + context_filters (Dict[str, List[str, PatternType]]): Filter by representation context fields. names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering using version ids and list of names under the version. @@ -1220,7 +1242,7 @@ def get_archived_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - context_filters (Dict[str, List[str, re.Pattern]]): Filter by + context_filters (Dict[str, List[str, PatternType]]): Filter by representation context fields. names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. 
@@ -1361,7 +1383,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id): if not src_type or not src_id: return None - query_filter = {"_id": _convert_id(src_id)} + query_filter = {"_id": convert_id(src_id)} conn = get_project_connection(project_name) src_doc = conn.find_one(query_filter, {"data.thumbnail_id"}) @@ -1388,7 +1410,7 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None): """ if thumbnail_ids: - thumbnail_ids = _convert_ids(thumbnail_ids) + thumbnail_ids = convert_ids(thumbnail_ids) if not thumbnail_ids: return [] @@ -1416,7 +1438,7 @@ def get_thumbnail(project_name, thumbnail_id, fields=None): if not thumbnail_id: return None - query_filter = {"type": "thumbnail", "_id": _convert_id(thumbnail_id)} + query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)} conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -1444,7 +1466,7 @@ def get_workfile_info( query_filter = { "type": "workfile", - "parent": _convert_id(asset_id), + "parent": convert_id(asset_id), "task_name": task_name, "filename": filename } @@ -1455,7 +1477,7 @@ def get_workfile_info( """ ## Custom data storage: - Settings - OP settings overrides and local settings -- Logging - logs from PypeLogger +- Logging - logs from Logger - Webpublisher - jobs - Ftrack - events - Maya - Shaders diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py new file mode 100644 index 0000000000..e42ac58aff --- /dev/null +++ b/openpype/client/entity_links.py @@ -0,0 +1,241 @@ +from .mongo import get_project_connection +from .entities import ( + get_assets, + get_asset_by_id, + get_version_by_id, + get_representation_by_id, + convert_id, +) + + +def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None): + """Extract linked asset ids from asset document. + + One of asset document or asset id must be passed. + + Note: + Asset links now works only from asset to assets. + + Args: + asset_doc (dict): Asset document from DB. + + Returns: + List[Union[ObjectId, str]]: Asset ids of input links. + """ + + output = [] + if not asset_doc and not asset_id: + return output + + if not asset_doc: + asset_doc = get_asset_by_id( + project_name, asset_id, fields=["data.inputLinks"] + ) + + input_links = asset_doc["data"].get("inputLinks") + if not input_links: + return output + + for item in input_links: + # Backwards compatibility for "_id" key which was replaced with + # "id" + if "_id" in item: + link_id = item["_id"] + else: + link_id = item["id"] + output.append(link_id) + return output + + +def get_linked_assets( + project_name, asset_doc=None, asset_id=None, fields=None +): + """Return linked assets based on passed asset document. + + One of asset document or asset id must be passed. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_doc (Dict[str, Any]): Asset document from database. + asset_id (Union[ObjectId, str]): Asset id. Can be used instead of + asset document. + fields (Iterable[str]): Fields that should be returned. All fields are + returned if 'None' is passed. + + Returns: + List[Dict[str, Any]]: Asset documents of input links for passed + asset doc. 
+ """ + + if not asset_doc: + if not asset_id: + return [] + asset_doc = get_asset_by_id( + project_name, + asset_id, + fields=["data.inputLinks"] + ) + if not asset_doc: + return [] + + link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc) + if not link_ids: + return [] + + return list(get_assets(project_name, asset_ids=link_ids, fields=fields)) + + +def get_linked_representation_id( + project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None +): + """Returns list of linked ids of particular type (if provided). + + One of representation document or representation id must be passed. + Note: + Representation links now works only from representation through version + back to representations. + + Args: + project_name (str): Name of project where look for links. + repre_doc (Dict[str, Any]): Representation document. + repre_id (Union[ObjectId, str]): Representation id. + link_type (str): Type of link (e.g. 'reference', ...). + max_depth (int): Limit recursion level. Default: 0 + + Returns: + List[ObjectId] Linked representation ids. + """ + + if repre_doc: + repre_id = repre_doc["_id"] + + if repre_id: + repre_id = convert_id(repre_id) + + if not repre_id and not repre_doc: + return [] + + version_id = None + if repre_doc: + version_id = repre_doc.get("parent") + + if not version_id: + repre_doc = get_representation_by_id( + project_name, repre_id, fields=["parent"] + ) + version_id = repre_doc["parent"] + + if not version_id: + return [] + + version_doc = get_version_by_id( + project_name, version_id, fields=["type", "version_id"] + ) + if version_doc["type"] == "hero_version": + version_id = version_doc["version_id"] + + if max_depth is None: + max_depth = 0 + + match = { + "_id": version_id, + # Links are not stored to hero versions at this moment so filter + # is limited to just versions + "type": "version" + } + + graph_lookup = { + "from": project_name, + "startWith": "$data.inputLinks.id", + "connectFromField": "data.inputLinks.id", + "connectToField": "_id", + "as": "outputs_recursive", + "depthField": "depth" + } + if max_depth != 0: + # We offset by -1 since 0 basically means no recursion + # but the recursion only happens after the initial lookup + # for outputs. + graph_lookup["maxDepth"] = max_depth - 1 + + query_pipeline = [ + # Match + {"$match": match}, + # Recursive graph lookup for inputs + {"$graphLookup": graph_lookup} + ] + + conn = get_project_connection(project_name) + result = conn.aggregate(query_pipeline) + referenced_version_ids = _process_referenced_pipeline_result( + result, link_type + ) + if not referenced_version_ids: + return [] + + ref_ids = conn.distinct( + "_id", + filter={ + "parent": {"$in": list(referenced_version_ids)}, + "type": "representation" + } + ) + + return list(ref_ids) + + +def _process_referenced_pipeline_result(result, link_type): + """Filters result from pipeline for particular link_type. + + Pipeline cannot use link_type directly in a query. 
+
+    Returns:
+        (set): version ids that are linked with given 'link_type'
+    """
+
+    referenced_version_ids = set()
+    correctly_linked_ids = set()
+    for item in result:
+        input_links = item.get("data", {}).get("inputLinks")
+        if not input_links:
+            continue
+
+        _filter_input_links(
+            input_links,
+            link_type,
+            correctly_linked_ids
+        )
+
+        # outputs_recursive in random order, sort by depth
+        outputs_recursive = item.get("outputs_recursive")
+        if not outputs_recursive:
+            continue
+
+        for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
+            output_links = output.get("data", {}).get("inputLinks")
+            if not output_links:
+                continue
+
+            # Leaf
+            if output["_id"] not in correctly_linked_ids:
+                continue
+
+            _filter_input_links(
+                output_links,
+                link_type,
+                correctly_linked_ids
+            )
+
+            referenced_version_ids.add(output["_id"])
+
+    return referenced_version_ids
+
+
+def _filter_input_links(input_links, link_type, correctly_linked_ids):
+    for input_link in input_links:
+        if link_type and input_link["type"] != link_type:
+            continue
+
+        link_id = input_link.get("id") or input_link.get("_id")
+        if link_id is not None:
+            correctly_linked_ids.add(link_id)
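The recursive link query above is easiest to read from the call site. A usage sketch, assuming a hypothetical project name and representation id:

```python
# Sketch: find representations referenced (directly or through version links)
# by one representation. Project name and id are made-up examples.
from bson.objectid import ObjectId

from openpype.client import get_linked_representation_id

linked_repre_ids = get_linked_representation_id(
    "my_project",                                   # hypothetical project
    repre_id=ObjectId("62e1a3e1f0d4bd8f0a1b2c3d"),  # hypothetical id
    link_type="reference",
    max_depth=2,  # limit recursion; None or 0 follows links without a limit
)
```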
`{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify update dictionaries were prepared functions which does that for you, their name has template `prepare__update_data` - they work on comparison of previous document and new document. If there is missing function for requested entity type it is because we didn't need it yet and require implementaion. + +### Delete +Delete operation need entity id. Entity will be deleted from mongo. + + +## What (probably) won't be replaced +Some parts of code are still using direct mongo calls. In most of cases it is for very specific calls that are module specific or their usage will completely change in future. +- Mongo calls that are not project specific (out of `avalon` collection) will be removed or will have to use different mechanism how the data are stored. At this moment it is related to OpenPype settings and logs, ftrack server events, some other data. +- Sync server queries. They're complex and very specific for sync server module. Their replacement will require specific calls to OpenPype server in v4 thus their abstraction with wrapper is irrelevant and would complicate production in v3. +- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers are creating, updating or removing assets in v3, but in v4 will create folders with different structure. Wrapping creation of assets would not help to prepare for v4 because of new data structures. The same can be said about editorial Extract Hierarchy Avalon plugin which create project structure. +- Code parts that is marked as deprecated in v3 or will be deprecated in v4. + - integrate asset legacy publish plugin - already is legacy kept for safety + - integrate thumbnail - thumbnails will be stored in different way in v4 + - input links - link will be stored in different way and will have different mechanism of linking. In v3 are links limited to same entity type "asset <-> asset" or "representation <-> representation". 
+ +## Known missing replacements +- change subset group in loader tool +- integrate subset group +- query input links in openpype lib +- create project in openpype lib +- save/create workfile doc in openpype lib +- integrate hero version diff --git a/openpype/client/operations.py b/openpype/client/operations.py index c4b95bf696..fd639c34a7 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -1,3 +1,4 @@ +import re import uuid import copy import collections @@ -8,16 +9,24 @@ from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne, UpdateOne from .mongo import get_project_connection +from .entities import get_project REMOVED_VALUE = object() +PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" +PROJECT_NAME_REGEX = re.compile( + "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) +) + CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" CURRENT_VERSION_SCHEMA = "openpype:version-3.0" +CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0" CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" +CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0" def _create_or_convert_to_mongo_id(mongo_id): @@ -154,6 +163,34 @@ def new_version_doc(version, subset_id, data=None, entity_id=None): } +def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None): + """Create skeleton data of hero version document. + + Args: + version_id (ObjectId): Is considered as unique identifier of version + under subset. + subset_id (Union[str, ObjectId]): Id of parent subset. + data (Dict[str, Any]): Version document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. + """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_HERO_VERSION_SCHEMA, + "type": "hero_version", + "version_id": version_id, + "parent": subset_id, + "data": data + } + + def new_representation_doc( name, version_id, context, data=None, entity_id=None ): @@ -189,6 +226,29 @@ def new_representation_doc( } +def new_thumbnail_doc(data=None, entity_id=None): + """Create skeleton data of thumbnail document. + + Args: + data (Dict[str, Any]): Thumbnail document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of thumbnail document. + """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "thumbnail", + "schema": CURRENT_THUMBNAIL_SCHEMA, + "data": data + } + + def new_workfile_info_doc( filename, asset_id, task_name, files, data=None, entity_id=None ): @@ -262,6 +322,20 @@ def prepare_version_update_data(old_doc, new_doc, replace=True): return _prepare_update_data(old_doc, new_doc, replace) +def prepare_hero_version_update_data(old_doc, new_doc, replace=True): + """Compare two hero version documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. 
+ """ + + return _prepare_update_data(old_doc, new_doc, replace) + + def prepare_representation_update_data(old_doc, new_doc, replace=True): """Compare two representation documents and prepare update data. @@ -444,7 +518,7 @@ class UpdateOperation(AbstractOperation): set_data = {} for key, value in self._update_data.items(): if value is REMOVED_VALUE: - unset_data[key] = value + unset_data[key] = None else: set_data[key] = value @@ -632,3 +706,89 @@ class OperationsSession(object): operation = DeleteOperation(project_name, entity_type, entity_id) self.add(operation) return operation + + +def create_project(project_name, project_code, library_project=False): + """Create project using OpenPype settings. + + This project creation function is not validating project document on + creation. It is because project document is created blindly with only + minimum required information about project which is it's name, code, type + and schema. + + Entered project name must be unique and project must not exist yet. + + Note: + This function is here to be OP v4 ready but in v3 has more logic + to do. That's why inner imports are in the body. + + Args: + project_name(str): New project name. Should be unique. + project_code(str): Project's code should be unique too. + library_project(bool): Project is library project. + + Raises: + ValueError: When project name already exists in MongoDB. + + Returns: + dict: Created project document. + """ + + from openpype.settings import ProjectSettings, SaveWarningExc + from openpype.pipeline.schema import validate + + if get_project(project_name, fields=["name"]): + raise ValueError("Project with name \"{}\" already exists".format( + project_name + )) + + if not PROJECT_NAME_REGEX.match(project_name): + raise ValueError(( + "Project name \"{}\" contain invalid characters" + ).format(project_name)) + + project_doc = { + "type": "project", + "name": project_name, + "data": { + "code": project_code, + "library_project": library_project + }, + "schema": CURRENT_PROJECT_SCHEMA + } + + op_session = OperationsSession() + # Insert document with basic data + create_op = op_session.create_entity( + project_name, project_doc["type"], project_doc + ) + op_session.commit() + + # Load ProjectSettings for the project and save it to store all attributes + # and Anatomy + try: + project_settings_entity = ProjectSettings(project_name) + project_settings_entity.save() + except SaveWarningExc as exc: + print(str(exc)) + except Exception: + op_session.delete_entity( + project_name, project_doc["type"], create_op.entity_id + ) + op_session.commit() + raise + + project_doc = get_project(project_name) + + try: + # Validate created project document + validate(project_doc) + except Exception: + # Remove project if is not valid + op_session.delete_entity( + project_name, project_doc["type"], create_op.entity_id + ) + op_session.commit() + raise + + return project_doc diff --git a/openpype/hooks/pre_add_last_workfile_arg.py b/openpype/hooks/pre_add_last_workfile_arg.py index 8edccd48d4..3609620917 100644 --- a/openpype/hooks/pre_add_last_workfile_arg.py +++ b/openpype/hooks/pre_add_last_workfile_arg.py @@ -19,6 +19,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "hiero", "houdini", "nukestudio", + "fusion", "blender", "photoshop", "tvpaint", diff --git a/openpype/hooks/pre_create_extra_workdir_folders.py b/openpype/hooks/pre_create_extra_workdir_folders.py index d79c5831ee..c5af620c87 100644 --- a/openpype/hooks/pre_create_extra_workdir_folders.py +++ 
@@ -1,8 +1,6 @@
 import os
-from openpype.lib import (
-    PreLaunchHook,
-    create_workdir_extra_folders
-)
+from openpype.lib import PreLaunchHook
+from openpype.pipeline.workfile import create_workdir_extra_folders


 class AddLastWorkfileToLaunchArgs(PreLaunchHook):
diff --git a/openpype/host/__init__.py b/openpype/host/__init__.py
index 84a2fa930a..da1237c739 100644
--- a/openpype/host/__init__.py
+++ b/openpype/host/__init__.py
@@ -1,13 +1,24 @@
 from .host import (
     HostBase,
+)
+
+from .interfaces import (
     IWorkfileHost,
     ILoadHost,
+    IPublishHost,
     INewPublisher,
 )

+from .dirmap import HostDirmap
+
+
 __all__ = (
     "HostBase",
+
     "IWorkfileHost",
     "ILoadHost",
+    "IPublishHost",
     "INewPublisher",
+
+    "HostDirmap",
 )
diff --git a/openpype/host/dirmap.py b/openpype/host/dirmap.py
new file mode 100644
index 0000000000..88d68f27bf
--- /dev/null
+++ b/openpype/host/dirmap.py
@@ -0,0 +1,205 @@
+"""Dirmap functionality used in host integrations inside DCCs.
+
+The idea for the current dirmap implementation comes from Maya, where it is
+possible to enter source and destination roots: each source found in a
+referenced file is replaced with each of the destination paths, and the
+first path that exists is used.
+"""
+
+import os
+from abc import ABCMeta, abstractmethod
+
+import six
+
+from openpype.lib import Logger
+from openpype.modules import ModulesManager
+from openpype.settings import get_project_settings
+from openpype.settings.lib import get_site_local_overrides
+
+
+@six.add_metaclass(ABCMeta)
+class HostDirmap(object):
+    """Abstract class for running dirmap on a workfile in a host.
+
+    Dirmap is used to translate paths inside a host workfile from one
+    OS to another. (E.g. an artist created the workfile on Windows and a
+    different artist opens the same file on Linux.)
+
+    Expects these methods to be implemented inside the host:
+        on_enable_dirmap: run host code for enabling dirmap
+        dirmap_routine: run host code to do the actual remapping
+    """
+
+    def __init__(
+        self, host_name, project_name, project_settings=None, sync_module=None
+    ):
+        self.host_name = host_name
+        self.project_name = project_name
+        self._project_settings = project_settings
+        self._sync_module = sync_module  # to limit reinit of Modules
+        self._log = None
+        self._mapping = None  # cache mapping
+
+    @property
+    def sync_module(self):
+        if self._sync_module is None:
+            manager = ModulesManager()
+            self._sync_module = manager["sync_server"]
+        return self._sync_module
+
+    @property
+    def project_settings(self):
+        if self._project_settings is None:
+            self._project_settings = get_project_settings(self.project_name)
+        return self._project_settings
+
+    @property
+    def log(self):
+        if self._log is None:
+            self._log = Logger.get_logger(self.__class__.__name__)
+        return self._log
+
+    @abstractmethod
+    def on_enable_dirmap(self):
+        """Run host dependent operation for enabling dirmap if necessary."""
+        pass
+
+    @abstractmethod
+    def dirmap_routine(self, source_path, destination_path):
+        """Run host dependent remapping from source_path to destination_path."""
+        pass
+
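The two abstract methods above are the whole host-side contract. A hypothetical subclass might look like this; the "my_dcc" host name and the print-based routines are stand-ins for real DCC calls:

```python
from openpype.host.dirmap import HostDirmap


class MyDCCDirmap(HostDirmap):
    """Hypothetical dirmap for a DCC exposing a path-mapping API."""

    def on_enable_dirmap(self):
        # One-time setup the DCC needs before any remapping happens.
        print("dirmap enabled")

    def dirmap_routine(self, source_path, destination_path):
        # Stand-in: a real host would call its own remap/substitute API.
        print("{} -> {}".format(source_path, destination_path))


# Settings and Site Sync lookups happen lazily, so construction is cheap;
# 'process_dirmap()' would then pull the mapping from project settings.
dirmap = MyDCCDirmap("my_dcc", "demo_project")
```
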
+ """ + + if not self._mapping: + self._mapping = self.get_mappings(self.project_settings) + if not self._mapping: + return + + self.log.info("Processing directory mapping ...") + self.on_enable_dirmap() + self.log.info("mapping:: {}".format(self._mapping)) + + for k, sp in enumerate(self._mapping["source-path"]): + dst = self._mapping["destination-path"][k] + try: + print("{} -> {}".format(sp, dst)) + self.dirmap_routine(sp, dst) + except IndexError: + # missing corresponding destination path + self.log.error(( + "invalid dirmap mapping, missing corresponding" + " destination directory." + )) + break + except RuntimeError: + self.log.error( + "invalid path {} -> {}, mapping not registered".format( + sp, dst + ) + ) + continue + + def get_mappings(self, project_settings): + """Get translation from source-path to destination-path. + + It checks if Site Sync is enabled and user chose to use local + site, in that case configuration in Local Settings takes precedence + """ + + local_mapping = self._get_local_sync_dirmap(project_settings) + dirmap_label = "{}-dirmap".format(self.host_name) + if ( + not self.project_settings[self.host_name].get(dirmap_label) + and not local_mapping + ): + return {} + mapping_settings = self.project_settings[self.host_name][dirmap_label] + mapping_enabled = mapping_settings["enabled"] or bool(local_mapping) + if not mapping_enabled: + return {} + + mapping = ( + local_mapping + or mapping_settings["paths"] + or {} + ) + + if ( + not mapping + or not mapping.get("destination-path") + or not mapping.get("source-path") + ): + return {} + return mapping + + def _get_local_sync_dirmap(self, project_settings): + """ + Returns dirmap if synch to local project is enabled. + + Only valid mapping is from roots of remote site to local site set + in Local Settings. 
+ + Args: + project_settings (dict) + Returns: + dict : { "source-path": [XXX], "destination-path": [YYYY]} + """ + + mapping = {} + + if not project_settings["global"]["sync_server"]["enabled"]: + return mapping + + project_name = os.getenv("AVALON_PROJECT") + + active_site = self.sync_module.get_local_normalized_site( + self.sync_module.get_active_site(project_name)) + remote_site = self.sync_module.get_local_normalized_site( + self.sync_module.get_remote_site(project_name)) + self.log.debug( + "active {} - remote {}".format(active_site, remote_site) + ) + + if ( + active_site == "local" + and project_name in self.sync_module.get_enabled_projects() + and active_site != remote_site + ): + sync_settings = self.sync_module.get_sync_project_setting( + project_name, + exclude_locals=False, + cached=False) + + active_overrides = get_site_local_overrides( + project_name, active_site) + remote_overrides = get_site_local_overrides( + project_name, remote_site) + + self.log.debug("local overrides {}".format(active_overrides)) + self.log.debug("remote overrides {}".format(remote_overrides)) + for root_name, active_site_dir in active_overrides.items(): + remote_site_dir = ( + remote_overrides.get(root_name) + or sync_settings["sites"][remote_site]["root"][root_name] + ) + if os.path.isdir(active_site_dir): + if "destination-path" not in mapping: + mapping["destination-path"] = [] + mapping["destination-path"].append(active_site_dir) + + if "source-path" not in mapping: + mapping["source-path"] = [] + mapping["source-path"].append(remote_site_dir) + + self.log.debug("local sync mapping:: {}".format(mapping)) + return mapping diff --git a/openpype/host/host.py b/openpype/host/host.py index 9cdbb819e1..99f7868727 100644 --- a/openpype/host/host.py +++ b/openpype/host/host.py @@ -1,37 +1,12 @@ import logging import contextlib -from abc import ABCMeta, abstractproperty, abstractmethod +from abc import ABCMeta, abstractproperty import six # NOTE can't import 'typing' because of issues in Maya 2020 # - shiboken crashes on 'typing' module import -class MissingMethodsError(ValueError): - """Exception when host miss some required methods for specific workflow. - - Args: - host (HostBase): Host implementation where are missing methods. - missing_methods (list[str]): List of missing methods. - """ - - def __init__(self, host, missing_methods): - joined_missing = ", ".join( - ['"{}"'.format(item) for item in missing_methods] - ) - if isinstance(host, HostBase): - host_name = host.name - else: - try: - host_name = host.__file__.replace("\\", "/").split("/")[-3] - except Exception: - host_name = str(host) - message = ( - "Host \"{}\" miss methods {}".format(host_name, joined_missing) - ) - super(MissingMethodsError, self).__init__(message) - - @six.add_metaclass(ABCMeta) class HostBase(object): """Base of host implementation class. @@ -185,347 +160,3 @@ class HostBase(object): yield finally: pass - - -class ILoadHost: - """Implementation requirements to be able use reference of representations. - - The load plugins can do referencing even without implementation of methods - here, but switch and removement of containers would not be possible. - - Questions: - - Is list container dependency of host or load plugins? - - Should this be directly in HostBase? - - how to find out if referencing is available? - - do we need to know that? - """ - - @staticmethod - def get_missing_load_methods(host): - """Look for missing methods on "old type" host implementation. 
- - Method is used for validation of implemented functions related to - loading. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for loading workflow. - """ - - if isinstance(host, ILoadHost): - return [] - - required = ["ls"] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_load_methods(host): - """Validate implemented methods of "old type" host for load workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - missing = ILoadHost.get_missing_load_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_containers(self): - """Retreive referenced containers from scene. - - This can be implemented in hosts where referencing can be used. - - Todo: - Rename function to something more self explanatory. - Suggestion: 'get_containers' - - Returns: - list[dict]: Information about loaded containers. - """ - - pass - - # --- Deprecated method names --- - def ls(self): - """Deprecated variant of 'get_containers'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_containers() - - -@six.add_metaclass(ABCMeta) -class IWorkfileHost: - """Implementation requirements to be able use workfile utils and tool.""" - - @staticmethod - def get_missing_workfile_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - workfiles. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for workfiles workflow. - """ - - if isinstance(host, IWorkfileHost): - return [] - - required = [ - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_workfile_methods(host): - """Validate methods of "old type" host for workfiles workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - - missing = IWorkfileHost.get_missing_workfile_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_workfile_extensions(self): - """Extensions that can be used as save. - - Questions: - This could potentially use 'HostDefinition'. - """ - - return [] - - @abstractmethod - def save_workfile(self, dst_path=None): - """Save currently opened scene. - - Args: - dst_path (str): Where the current scene should be saved. Or use - current path if 'None' is passed. - """ - - pass - - @abstractmethod - def open_workfile(self, filepath): - """Open passed filepath in the host. - - Args: - filepath (str): Path to workfile. - """ - - pass - - @abstractmethod - def get_current_workfile(self): - """Retreive path to current opened file. - - Returns: - str: Path to file which is currently opened. - None: If nothing is opened. - """ - - return None - - def workfile_has_unsaved_changes(self): - """Currently opened scene is saved. 
- - Not all hosts can know if current scene is saved because the API of - DCC does not support it. - - Returns: - bool: True if scene is saved and False if has unsaved - modifications. - None: Can't tell if workfiles has modifications. - """ - - return None - - def work_root(self, session): - """Modify workdir per host. - - Default implementation keeps workdir untouched. - - Warnings: - We must handle this modification with more sofisticated way because - this can't be called out of DCC so opening of last workfile - (calculated before DCC is launched) is complicated. Also breaking - defined work template is not a good idea. - Only place where it's really used and can make sense is Maya. There - workspace.mel can modify subfolders where to look for maya files. - - Args: - session (dict): Session context data. - - Returns: - str: Path to new workdir. - """ - - return session["AVALON_WORKDIR"] - - # --- Deprecated method names --- - def file_extensions(self): - """Deprecated variant of 'get_workfile_extensions'. - - Todo: - Remove when all usages are replaced. - """ - return self.get_workfile_extensions() - - def save_file(self, dst_path=None): - """Deprecated variant of 'save_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - self.save_workfile() - - def open_file(self, filepath): - """Deprecated variant of 'open_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.open_workfile(filepath) - - def current_file(self): - """Deprecated variant of 'get_current_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_current_workfile() - - def has_unsaved_changes(self): - """Deprecated variant of 'workfile_has_unsaved_changes'. - - Todo: - Remove when all usages are replaced. - """ - - return self.workfile_has_unsaved_changes() - - -class INewPublisher: - """Functions related to new creation system in new publisher. - - New publisher is not storing information only about each created instance - but also some global data. At this moment are data related only to context - publish plugins but that can extend in future. - """ - - @staticmethod - def get_missing_publish_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - new publish creation. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Host module where to look for - required methods. - - Returns: - list[str]: Missing method implementations for new publsher - workflow. - """ - - if isinstance(host, INewPublisher): - return [] - - required = [ - "get_context_data", - "update_context_data", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_publish_methods(host): - """Validate implemented methods of "old type" host. - - Args: - Union[ModuleType, HostBase]: Host module to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - missing = INewPublisher.get_missing_publish_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_context_data(self): - """Get global data related to creation-publishing from workfile. - - These data are not related to any created instance but to whole - publishing context. Not saving/returning them will cause that each - reset of publishing resets all values to default ones. 
-
-        Context data can contain information about enabled/disabled publish
-        plugins or other values that can be filled by artist.
-
-        Returns:
-            dict: Context data stored using 'update_context_data'.
-        """
-
-        pass
-
-    @abstractmethod
-    def update_context_data(self, data, changes):
-        """Store global context data to workfile.
-
-        Called when some values in context data has changed.
-
-        Without storing the values in a way that 'get_context_data' would
-        return them will each reset of publishing cause loose of filled values
-        by artist. Best practice is to store values into workfile, if possible.
-
-        Args:
-            data (dict): New data as are.
-            changes (dict): Only data that has been changed. Each value has
-                tuple with '(, )' value.
-        """
-
-        pass
diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py
new file mode 100644
index 0000000000..3b2df745d1
--- /dev/null
+++ b/openpype/host/interfaces.py
@@ -0,0 +1,386 @@
+from abc import ABCMeta, abstractmethod
+import six
+
+
+class MissingMethodsError(ValueError):
+    """Exception raised when a host is missing required methods for a
+    specific workflow.
+
+    Args:
+        host (HostBase): Host implementation where methods are missing.
+        missing_methods (list[str]): List of missing methods.
+    """
+
+    def __init__(self, host, missing_methods):
+        joined_missing = ", ".join(
+            ['"{}"'.format(item) for item in missing_methods]
+        )
+        host_name = getattr(host, "name", None)
+        if not host_name:
+            try:
+                host_name = host.__file__.replace("\\", "/").split("/")[-3]
+            except Exception:
+                host_name = str(host)
+        message = (
+            "Host \"{}\" is missing methods {}".format(
+                host_name, joined_missing
+            )
+        )
+        super(MissingMethodsError, self).__init__(message)
+
+
+class ILoadHost:
+    """Implementation requirements to be able to use references of
+    representations.
+
+    The load plugins can do referencing even without implementation of
+    methods here, but switching and removal of containers would not be
+    possible.
+
+    Questions:
+    - Is list container dependency of host or load plugins?
+    - Should this be directly in HostBase?
+        - how to find out if referencing is available?
+        - do we need to know that?
+    """
+
+    @staticmethod
+    def get_missing_load_methods(host):
+        """Look for missing methods on "old type" host implementation.
+
+        Method is used for validation of implemented functions related to
+        loading. Checks only existence of methods.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Object of host where to look
+                for required methods.
+
+        Returns:
+            list[str]: Missing method implementations for loading workflow.
+        """
+
+        if isinstance(host, ILoadHost):
+            return []
+
+        required = ["ls"]
+        missing = []
+        for name in required:
+            if not hasattr(host, name):
+                missing.append(name)
+        return missing
+
+    @staticmethod
+    def validate_load_methods(host):
+        """Validate implemented methods of "old type" host for load workflow.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Object of host to validate.
+
+        Raises:
+            MissingMethodsError: If there are missing methods on host
+                implementation.
+        """
+        missing = ILoadHost.get_missing_load_methods(host)
+        if missing:
+            raise MissingMethodsError(host, missing)
+
+    @abstractmethod
+    def get_containers(self):
+        """Retrieve referenced containers from scene.
+
+        This can be implemented in hosts where referencing can be used.
+
+        Returns:
+            list[dict]: Information about loaded containers.
+        """
+
+        pass
+
+    # --- Deprecated method names ---
+    def ls(self):
+        """Deprecated variant of 'get_containers'.
+
+        Todo:
+            Remove when all usages are replaced.
+        """
+
+        return self.get_containers()
+
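A sketch of the two ways this interface can be satisfied, assuming only the names defined above; `DemoHost` and the `legacy_host` module are hypothetical:

```python
import types

from openpype.host.interfaces import ILoadHost


class DemoHost(ILoadHost):
    """Hypothetical host tracking containers in a plain list."""

    def __init__(self):
        self._containers = []

    def get_containers(self):
        return list(self._containers)


# An "old type" host module only has to expose the legacy 'ls' function.
legacy_host = types.ModuleType("legacy_host")
legacy_host.ls = lambda: []

print(ILoadHost.get_missing_load_methods(legacy_host))  # -> []
ILoadHost.validate_load_methods(legacy_host)  # no MissingMethodsError
```
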
+ """ + + pass + + # --- Deprecated method names --- + def ls(self): + """Deprecated variant of 'get_containers'. + + Todo: + Remove when all usages are replaced. + """ + + return self.get_containers() + + +@six.add_metaclass(ABCMeta) +class IWorkfileHost: + """Implementation requirements to be able use workfile utils and tool.""" + + @staticmethod + def get_missing_workfile_methods(host): + """Look for missing methods on "old type" host implementation. + + Method is used for validation of implemented functions related to + workfiles. Checks only existence of methods. + + Args: + Union[ModuleType, HostBase]: Object of host where to look for + required methods. + + Returns: + list[str]: Missing method implementations for workfiles workflow. + """ + + if isinstance(host, IWorkfileHost): + return [] + + required = [ + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "file_extensions", + "work_root", + ] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + return missing + + @staticmethod + def validate_workfile_methods(host): + """Validate methods of "old type" host for workfiles workflow. + + Args: + Union[ModuleType, HostBase]: Object of host to validate. + + Raises: + MissingMethodsError: If there are missing methods on host + implementation. + """ + + missing = IWorkfileHost.get_missing_workfile_methods(host) + if missing: + raise MissingMethodsError(host, missing) + + @abstractmethod + def get_workfile_extensions(self): + """Extensions that can be used as save. + + Questions: + This could potentially use 'HostDefinition'. + """ + + return [] + + @abstractmethod + def save_workfile(self, dst_path=None): + """Save currently opened scene. + + Args: + dst_path (str): Where the current scene should be saved. Or use + current path if 'None' is passed. + """ + + pass + + @abstractmethod + def open_workfile(self, filepath): + """Open passed filepath in the host. + + Args: + filepath (str): Path to workfile. + """ + + pass + + @abstractmethod + def get_current_workfile(self): + """Retreive path to current opened file. + + Returns: + str: Path to file which is currently opened. + None: If nothing is opened. + """ + + return None + + def workfile_has_unsaved_changes(self): + """Currently opened scene is saved. + + Not all hosts can know if current scene is saved because the API of + DCC does not support it. + + Returns: + bool: True if scene is saved and False if has unsaved + modifications. + None: Can't tell if workfiles has modifications. + """ + + return None + + def work_root(self, session): + """Modify workdir per host. + + Default implementation keeps workdir untouched. + + Warnings: + We must handle this modification with more sofisticated way because + this can't be called out of DCC so opening of last workfile + (calculated before DCC is launched) is complicated. Also breaking + defined work template is not a good idea. + Only place where it's really used and can make sense is Maya. There + workspace.mel can modify subfolders where to look for maya files. + + Args: + session (dict): Session context data. + + Returns: + str: Path to new workdir. + """ + + return session["AVALON_WORKDIR"] + + # --- Deprecated method names --- + def file_extensions(self): + """Deprecated variant of 'get_workfile_extensions'. + + Todo: + Remove when all usages are replaced. + """ + return self.get_workfile_extensions() + + def save_file(self, dst_path=None): + """Deprecated variant of 'save_workfile'. 
+
+class IPublishHost:
+    """Functions related to the new creation system in the new publisher.
+
+    The new publisher does not store information only about each created
+    instance but also some global data. At this moment the data relate only
+    to context publish plugins, but that can be extended in the future.
+    """
+
+    @staticmethod
+    def get_missing_publish_methods(host):
+        """Look for missing methods on "old type" host implementation.
+
+        Method is used for validation of implemented functions related to
+        new publish creation. Checks only existence of methods.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Host module where to look
+                for required methods.
+
+        Returns:
+            list[str]: Missing method implementations for new publisher
+                workflow.
+        """
+
+        if isinstance(host, IPublishHost):
+            return []
+
+        required = [
+            "get_context_data",
+            "update_context_data",
+            "get_context_title",
+            "get_current_context",
+        ]
+        missing = []
+        for name in required:
+            if not hasattr(host, name):
+                missing.append(name)
+        return missing
+
+    @staticmethod
+    def validate_publish_methods(host):
+        """Validate implemented methods of "old type" host.
+
+        Args:
+            host (Union[ModuleType, HostBase]): Host module to validate.
+
+        Raises:
+            MissingMethodsError: If there are missing methods on host
+                implementation.
+        """
+        missing = IPublishHost.get_missing_publish_methods(host)
+        if missing:
+            raise MissingMethodsError(host, missing)
+
+    @abstractmethod
+    def get_context_data(self):
+        """Get global data related to creation-publishing from workfile.
+
+        These data are not related to any created instance but to the whole
+        publishing context. If they are not saved and returned, each reset
+        of publishing resets all values to the defaults.
+
+        Context data can contain information about enabled/disabled publish
+        plugins or other values that can be filled by the artist.
+
+        Returns:
+            dict: Context data stored using 'update_context_data'.
+        """
+
+        pass
+
+    @abstractmethod
+    def update_context_data(self, data, changes):
+        """Store global context data to workfile.
+
+        Called when some values in context data have changed.
+
+        Without storing the values in a way that 'get_context_data' can
+        return them, each reset of publishing causes the loss of values
+        filled by the artist. Best practice is to store the values into the
+        workfile, if possible.
+
+        Args:
+            data (dict): New data as a whole.
+            changes (dict): Only data that has been changed. Each value is a
+                tuple of '(<old value>, <new value>)'.
+        """
+
+        pass
+
+
+class INewPublisher(IPublishHost):
+    """Legacy interface replaced by 'IPublishHost'.
+
+    Deprecated:
+        'INewPublisher' is replaced by 'IPublishHost', please change your
+        imports.
+        There is no "reasonable" way to mark these classes as deprecated
+        and show a warning on wrong import.
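How a host might satisfy the context-data pair above, as a sketch only; the sidecar json file is a stand-in, since a real host would store the data inside the workfile itself:

```python
import json
import os
import tempfile

from openpype.host.interfaces import IPublishHost


class DemoPublishHost(IPublishHost):
    """Hypothetical host persisting context data next to the workfile."""

    def __init__(self):
        self._path = os.path.join(tempfile.gettempdir(), "demo_context.json")

    def get_context_data(self):
        try:
            with open(self._path, "r") as stream:
                return json.load(stream)
        except (IOError, OSError, ValueError):
            return {}

    def update_context_data(self, data, changes):
        # 'changes' holds (old, new) tuples per key; persisting the full
        # 'data' is all that is needed to survive a publisher reset.
        with open(self._path, "w") as stream:
            json.dump(data, stream)
```
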
Deprecated since 3.14.* will be + removed in 3.15.* + """ + + pass diff --git a/openpype/hosts/aftereffects/__init__.py b/openpype/hosts/aftereffects/__init__.py index deae48d122..ae750d05b6 100644 --- a/openpype/hosts/aftereffects/__init__.py +++ b/openpype/hosts/aftereffects/__init__.py @@ -1,9 +1,6 @@ -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True", - "WEBSOCKET_URL": "ws://localhost:8097/ws/" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +from .addon import AfterEffectsAddon + + +__all__ = ( + "AfterEffectsAddon", +) diff --git a/openpype/hosts/aftereffects/addon.py b/openpype/hosts/aftereffects/addon.py new file mode 100644 index 0000000000..94843e7dc5 --- /dev/null +++ b/openpype/hosts/aftereffects/addon.py @@ -0,0 +1,23 @@ +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + + +class AfterEffectsAddon(OpenPypeModule, IHostAddon): + name = "aftereffects" + host_name = "aftereffects" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True", + "WEBSOCKET_URL": "ws://localhost:8097/ws/" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".aep"] diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py index 30a3e1f1c3..9c8513fe8c 100644 --- a/openpype/hosts/aftereffects/api/launch_logic.py +++ b/openpype/hosts/aftereffects/api/launch_logic.py @@ -12,6 +12,7 @@ from wsrpc_aiohttp import ( from Qt import QtCore +from openpype.lib import Logger from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.tools.adobe_webserver.app import WebServerTool @@ -84,8 +85,6 @@ class ProcessLauncher(QtCore.QObject): @property def log(self): if self._log is None: - from openpype.api import Logger - self._log = Logger.get_logger("{}-launcher".format( self.route_name)) return self._log diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index ce4cbf09af..8cdf9c407e 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ b/openpype/hosts/aftereffects/api/lib.py @@ -1,13 +1,16 @@ import os import sys +import re +import json import contextlib import traceback import logging +from functools import partial from Qt import QtWidgets from openpype.pipeline import install_host -from openpype.lib.remote_publish import headless_publish +from openpype.modules import ModulesManager from openpype.tools.utils import host_tools from .launch_logic import ProcessLauncher, get_stub @@ -35,10 +38,18 @@ def main(*subprocess_args): launcher.start() if os.environ.get("HEADLESS_PUBLISH"): - launcher.execute_in_main_thread(lambda: headless_publish( - log, - "CloseAE", - os.environ.get("IS_TEST"))) + manager = ModulesManager() + webpublisher_addon = manager["webpublisher"] + + launcher.execute_in_main_thread( + partial( + webpublisher_addon.headless_publish, + log, + "CloseAE", + os.environ.get("IS_TEST") + ) + ) + elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True): save = False if os.getenv("WORKFILES_SAVE_AS"): @@ -68,3 +79,57 @@ def get_extension_manifest_path(): "CSXS", "manifest.xml" ) + + +def get_unique_layer_name(layers, 
name): + """ + Gets all layer names and if 'name' is present in them, increases + suffix by 1 (eg. creates unique layer name - for Loader) + Args: + layers (list): of strings, names only + name (string): checked value + + Returns: + (string): name_00X (without version) + """ + names = {} + for layer in layers: + layer_name = re.sub(r'_\d{3}$', '', layer) + if layer_name in names.keys(): + names[layer_name] = names[layer_name] + 1 + else: + names[layer_name] = 1 + occurrences = names.get(name, 0) + + return "{}_{:0>3d}".format(name, occurrences + 1) + + +def get_background_layers(file_url): + """ + Pulls file name from background json file, enrich with folder url for + AE to be able import files. + + Order is important, follows order in json. + + Args: + file_url (str): abs url of background json + + Returns: + (list): of abs paths to images + """ + with open(file_url) as json_file: + data = json.load(json_file) + + layers = list() + bg_folder = os.path.dirname(file_url) + for child in data['children']: + if child.get("filename"): + layers.append(os.path.join(bg_folder, child.get("filename")). + replace("\\", "/")) + else: + for layer in child['children']: + if layer.get("filename"): + layers.append(os.path.join(bg_folder, + layer.get("filename")). + replace("\\", "/")) + return layers diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index c13c22ced5..7026fe3f05 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -4,8 +4,7 @@ from Qt import QtWidgets import pyblish.api -from openpype import lib -from openpype.api import Logger +from openpype.lib import Logger, register_event_callback from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -16,9 +15,8 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import any_outdated_containers import openpype.hosts.aftereffects -from openpype.lib import register_event_callback -from .launch_logic import get_stub +from .launch_logic import get_stub, ConnectionNotEstablishedYet log = Logger.get_logger(__name__) @@ -111,7 +109,7 @@ def ls(): """ try: stub = get_stub() # only after AfterEffects is up - except lib.ConnectionNotEstablishedYet: + except ConnectionNotEstablishedYet: print("Not connected yet, ignoring") return @@ -284,7 +282,7 @@ def _get_stub(): """ try: stub = get_stub() # only after Photoshop is up - except lib.ConnectionNotEstablishedYet: + except ConnectionNotEstablishedYet: print("Not connected yet, ignoring") return diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py index d6c732285a..18b40af5dc 100644 --- a/openpype/hosts/aftereffects/api/workio.py +++ b/openpype/hosts/aftereffects/api/workio.py @@ -1,12 +1,11 @@ """Host API required Work Files tool""" import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .launch_logic import get_stub def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["aftereffects"] + return [".aep"] def has_unsaved_changes(): diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py index d346df504a..260e780be0 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_background.py +++ b/openpype/hosts/aftereffects/plugins/load/load_background.py @@ -1,14 +1,14 @@ import re -from openpype.lib import ( - get_background_layers, - get_unique_layer_name -) from openpype.pipeline import get_representation_path from 
openpype.hosts.aftereffects.api import ( AfterEffectsLoader, containerise ) +from openpype.hosts.aftereffects.api.lib import ( + get_background_layers, + get_unique_layer_name, +) class BackgroundLoader(AfterEffectsLoader): diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py index 6ab69c6bfa..2ddc9825e5 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_file.py +++ b/openpype/hosts/aftereffects/plugins/load/load_file.py @@ -1,12 +1,11 @@ import re -from openpype import lib - from openpype.pipeline import get_representation_path from openpype.hosts.aftereffects.api import ( AfterEffectsLoader, containerise ) +from openpype.hosts.aftereffects.api.lib import get_unique_layer_name class FileLoader(AfterEffectsLoader): @@ -28,7 +27,7 @@ class FileLoader(AfterEffectsLoader): stub = self.get_stub() layers = stub.get_items(comps=True, folders=True, footages=True) existing_layers = [layer.name for layer in layers] - comp_name = lib.get_unique_layer_name( + comp_name = get_unique_layer_name( existing_layers, "{}_{}".format(context["asset"]["name"], name)) import_options = {} @@ -87,7 +86,7 @@ class FileLoader(AfterEffectsLoader): if namespace_from_container != layer_name: layers = stub.get_items(comps=True) existing_layers = [layer.name for layer in layers] - layer_name = lib.get_unique_layer_name( + layer_name = get_unique_layer_name( existing_layers, "{}_{}".format(context["asset"], context["subset"])) else: # switching version - keep same name diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index fef5448a4c..3c5013b3bd 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -1,8 +1,8 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -71,13 +71,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, self.default_variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) # Create instance instance = context.create_instance(subset) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index 7323a0b125..dc65cee61d 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -2,14 +2,18 @@ import os import sys import six -import openpype.api +from openpype.lib import ( + get_ffmpeg_tool_path, + run_subprocess, +) +from openpype.pipeline import publish from openpype.hosts.aftereffects.api import get_stub -class ExtractLocalRender(openpype.api.Extractor): +class ExtractLocalRender(publish.Extractor): """Render RenderQueue locally.""" - order = openpype.api.Extractor.order - 0.47 + order = publish.Extractor.order - 0.47 label = "Extract Local Render" hosts = ["aftereffects"] families = ["renderLocal", "render.local"] @@ -53,7 +57,7 @@ class 
ExtractLocalRender(openpype.api.Extractor): instance.data["representations"] = [repre_data] - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # Generate thumbnail. thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") @@ -66,7 +70,7 @@ class ExtractLocalRender(openpype.api.Extractor): ] self.log.debug("Thumbnail args:: {}".format(args)) try: - output = openpype.lib.run_subprocess(args) + output = run_subprocess(args) except TypeError: self.log.warning("Error in creating thumbnail") six.reraise(*sys.exc_info()) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py index eb2977309f..343838eb49 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py @@ -1,13 +1,13 @@ import pyblish.api -import openpype.api +from openpype.pipeline import publish from openpype.hosts.aftereffects.api import get_stub class ExtractSaveScene(pyblish.api.ContextPlugin): """Save scene before extraction.""" - order = openpype.api.Extractor.order - 0.48 + order = publish.Extractor.order - 0.48 label = "Extract Save Scene" hosts = ["aftereffects"] diff --git a/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py b/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py index 0829355f3b..d8f6ef5d27 100644 --- a/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype.action import get_errored_plugins_from_data from openpype.lib import version_up +from openpype.pipeline.publish import get_errored_plugins_from_context from openpype.hosts.aftereffects.api import get_stub @@ -18,7 +18,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin): optional = True def process(self, instance): - errored_plugins = get_errored_plugins_from_data(instance.context) + errored_plugins = get_errored_plugins_from_context(instance.context) if errored_plugins: raise RuntimeError( "Skipping incrementing current file because publishing failed." diff --git a/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py b/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py index 5f3fcc3089..370f916f04 100644 --- a/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py +++ b/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py @@ -1,8 +1,8 @@ -import openpype.api +from openpype.pipeline import publish from openpype.hosts.aftereffects.api import get_stub -class RemovePublishHighlight(openpype.api.Extractor): +class RemovePublishHighlight(publish.Extractor): """Clean utf characters which are not working in DL Published compositions are marked with unicode icon which causes @@ -10,7 +10,7 @@ class RemovePublishHighlight(openpype.api.Extractor): rendering, add it later back to avoid confusion. 
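The fractional offsets used by these plugins are what sequence them: pyblish runs plugins in ascending `order`, so subtracting a little more from the shared base slots a plugin just before its siblings. A hypothetical illustration:

```python
import pyblish.api

from openpype.pipeline import publish


class ExtractFirst(publish.Extractor):
    """Hypothetical extractor scheduled before its siblings."""

    # Plugins are sorted by ascending 'order'; since -0.49 < -0.48 < -0.47,
    # this plugin runs first among the three offsets used in these files.
    order = publish.Extractor.order - 0.49

    def process(self, instance):
        self.log.info("runs before the -0.48 and -0.47 offsets")
```
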
""" - order = openpype.api.Extractor.order - 0.49 # just before save + order = publish.Extractor.order - 0.49 # just before save label = "Clean render comp" hosts = ["aftereffects"] families = ["render.farm"] diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py index 7a9356f020..6c36136b20 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -1,9 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import ( +from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ( + ValidateContentsOrder, PublishXmlValidationError, - legacy_io, ) from openpype.hosts.aftereffects.api import get_stub @@ -50,7 +50,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): label = "Validate Instance Asset" hosts = ["aftereffects"] actions = [ValidateInstanceAssetRepair] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): instance_asset = instance.data["asset"] diff --git a/openpype/hosts/blender/__init__.py b/openpype/hosts/blender/__init__.py index 0f27882c7e..2a6603606a 100644 --- a/openpype/hosts/blender/__init__.py +++ b/openpype/hosts/blender/__init__.py @@ -1,52 +1,6 @@ -import os +from .addon import BlenderAddon -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - # Prepare path to implementation script - implementation_user_script_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "blender_addon" - ) - - # Add blender implementation script path to PYTHONPATH - python_path = env.get("PYTHONPATH") or "" - python_path_parts = [ - path - for path in python_path.split(os.pathsep) - if path - ] - python_path_parts.insert(0, implementation_user_script_path) - env["PYTHONPATH"] = os.pathsep.join(python_path_parts) - - # Modify Blender user scripts path - previous_user_scripts = set() - # Implementation path is added to set for easier paths check inside loops - # - will be removed at the end - previous_user_scripts.add(implementation_user_script_path) - - openpype_blender_user_scripts = ( - env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or "" - ) - for path in openpype_blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" - for path in blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - # Remove implementation path from user script paths as is set to - # `BLENDER_USER_SCRIPTS` - previous_user_scripts.remove(implementation_user_script_path) - env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path - - # Set custom user scripts env - env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join( - previous_user_scripts - ) - - # Define Qt binding if not defined - if not env.get("QT_PREFERRED_BINDING"): - env["QT_PREFERRED_BINDING"] = "PySide2" +__all__ = ( + "BlenderAddon", +) diff --git a/openpype/hosts/blender/addon.py b/openpype/hosts/blender/addon.py new file mode 100644 index 0000000000..3ee638a5bb --- /dev/null +++ b/openpype/hosts/blender/addon.py @@ -0,0 +1,73 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class 
BlenderAddon(OpenPypeModule, IHostAddon): + name = "blender" + host_name = "blender" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + # Prepare path to implementation script + implementation_user_script_path = os.path.join( + BLENDER_ROOT_DIR, + "blender_addon" + ) + + # Add blender implementation script path to PYTHONPATH + python_path = env.get("PYTHONPATH") or "" + python_path_parts = [ + path + for path in python_path.split(os.pathsep) + if path + ] + python_path_parts.insert(0, implementation_user_script_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Modify Blender user scripts path + previous_user_scripts = set() + # Implementation path is added to set for easier paths check inside + # loops - will be removed at the end + previous_user_scripts.add(implementation_user_script_path) + + openpype_blender_user_scripts = ( + env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or "" + ) + for path in openpype_blender_user_scripts.split(os.pathsep): + if path: + previous_user_scripts.add(os.path.normpath(path)) + + blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" + for path in blender_user_scripts.split(os.pathsep): + if path: + previous_user_scripts.add(os.path.normpath(path)) + + # Remove implementation path from user script paths as is set to + # `BLENDER_USER_SCRIPTS` + previous_user_scripts.remove(implementation_user_script_path) + env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path + + # Set custom user scripts env + env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join( + previous_user_scripts + ) + + # Define Qt binding if not defined + if not env.get("QT_PREFERRED_BINDING"): + env["QT_PREFERRED_BINDING"] = "PySide2" + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(BLENDER_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".blend"] diff --git a/openpype/hosts/blender/api/action.py b/openpype/hosts/blender/api/action.py index 09ef76326e..fe0833e39f 100644 --- a/openpype/hosts/blender/api/action.py +++ b/openpype/hosts/blender/api/action.py @@ -2,7 +2,7 @@ import bpy import pyblish.api -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context class SelectInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py index 20098c0fe8..05912885f7 100644 --- a/openpype/hosts/blender/api/lib.py +++ b/openpype/hosts/blender/api/lib.py @@ -6,7 +6,7 @@ from typing import Dict, List, Union import bpy import addon_utils -from openpype.api import Logger +from openpype.lib import Logger from . import pipeline @@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List: def read(node: bpy.types.bpy_struct_meta_idprop): """Return user-defined attributes from `node`""" - data = dict(node.get(pipeline.AVALON_PROPERTY)) + data = dict(node.get(pipeline.AVALON_PROPERTY, {})) # Ignore hidden/internal data data = { diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index 4f8410da74..e0e09277df 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict() # This seems like a good value to keep the Qt app responsive and doesn't slow # down Blender. 
At least on macOS I the interace of Blender gets very laggy if # you make it smaller. -TIMER_INTERVAL: float = 0.01 +TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1 class BlenderApplication(QtWidgets.QApplication): @@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]: dialog.setDetailedText(detail) dialog.exec_() + # Refresh Manager + if GlobalClass.app: + manager = GlobalClass.app.get_window("WM_OT_avalon_manager") + if manager: + manager.refresh() + if not GlobalClass.is_windows: if OpenFileCacher.opening_file: return TIMER_INTERVAL @@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator): self._app = BlenderApplication.get_app() GlobalClass.app = self._app - bpy.app.timers.register( - _process_app_events, - persistent=True - ) + if not bpy.app.timers.is_registered(_process_app_events): + bpy.app.timers.register( + _process_app_events, + persistent=True + ) def execute(self, context): """Execute the operator. diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index ea405b028e..c2aee1e653 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -20,8 +20,8 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) -from openpype.api import Logger from openpype.lib import ( + Logger, register_event_callback, emit_event ) diff --git a/openpype/hosts/blender/api/workio.py b/openpype/hosts/blender/api/workio.py index 5eb9f82999..a8f6193abc 100644 --- a/openpype/hosts/blender/api/workio.py +++ b/openpype/hosts/blender/api/workio.py @@ -5,8 +5,6 @@ from typing import List, Optional import bpy -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - class OpenFileCacher: """Store information about opening file. 
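The ops.py change above makes timer registration idempotent. The same pattern, sketched for an arbitrary Blender-side callback (this only runs inside Blender's bundled Python):

```python
import bpy


def _tick():
    # Do periodic work; returning a float re-schedules the timer.
    return 0.1


def ensure_timer():
    # 'is_registered' makes registration idempotent, so calling this from
    # several operators cannot stack duplicate timers.
    if not bpy.app.timers.is_registered(_tick):
        bpy.app.timers.register(_tick, persistent=True)
```
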
@@ -78,7 +76,7 @@ def has_unsaved_changes() -> bool: def file_extensions() -> List[str]: """Return the supported file extensions for Blender scene files.""" - return HOST_WORKFILE_EXTENSIONS["blender"] + return [".blend"] def work_root(session: dict) -> str: diff --git a/openpype/hosts/blender/blender_addon/startup/init.py b/openpype/hosts/blender/blender_addon/startup/init.py index 13a4b8a7a1..8dbff8a91d 100644 --- a/openpype/hosts/blender/blender_addon/startup/init.py +++ b/openpype/hosts/blender/blender_addon/startup/init.py @@ -1,4 +1,10 @@ from openpype.pipeline import install_host from openpype.hosts.blender import api -install_host(api) + +def register(): + install_host(api) + + +def unregister(): + pass diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index cf8e89ed1f..e0124053bf 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,12 +6,12 @@ from typing import Dict, List, Optional import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.create import get_legacy_creator_by_name from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, @@ -157,7 +157,7 @@ class BlendLayoutLoader(plugin.AssetLoader): t.id = local_obj elif local_obj.type == 'EMPTY': - creator_plugin = lib.get_creator_by_name("CreateAnimation") + creator_plugin = get_legacy_creator_by_name("CreateAnimation") if not creator_plugin: raise ValueError("Creator plugin \"CreateAnimation\" was " "not found.") diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index a0580af4a0..eca098627e 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -118,7 +118,7 @@ class JsonLayoutLoader(plugin.AssetLoader): # Camera creation when loading a layout is not necessary for now, # but the code is worth keeping in case we need it in the future. 
 # # Create the camera asset
-# creator_plugin = lib.get_creator_by_name("CreateCamera")
+# creator_plugin = get_legacy_creator_by_name("CreateCamera")
 # if not creator_plugin:
 #     raise ValueError("Creator plugin \"CreateCamera\" was "
 #                      "not found.")
diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py
index 4dfa96167f..1d23a70061 100644
--- a/openpype/hosts/blender/plugins/load/load_rig.py
+++ b/openpype/hosts/blender/plugins/load/load_rig.py
@@ -6,12 +6,12 @@
 from typing import Dict, List, Optional

 import bpy

-from openpype import lib
 from openpype.pipeline import (
     legacy_create,
     get_representation_path,
     AVALON_CONTAINER_ID,
 )
+from openpype.pipeline.create import get_legacy_creator_by_name
 from openpype.hosts.blender.api import (
     plugin,
     get_selection,
@@ -244,7 +244,7 @@ class BlendRigLoader(plugin.AssetLoader):
         objects = self._process(libpath, asset_group, group_name, action)

         if create_animation:
-            creator_plugin = lib.get_creator_by_name("CreateAnimation")
+            creator_plugin = get_legacy_creator_by_name("CreateAnimation")
             if not creator_plugin:
                 raise ValueError("Creator plugin \"CreateAnimation\" was "
                                  "not found.")
diff --git a/openpype/hosts/blender/plugins/publish/collect_current_file.py b/openpype/hosts/blender/plugins/publish/collect_current_file.py
index 72976c490b..c3097a0694 100644
--- a/openpype/hosts/blender/plugins/publish/collect_current_file.py
+++ b/openpype/hosts/blender/plugins/publish/collect_current_file.py
@@ -1,6 +1,19 @@
+import os
 import bpy
 import pyblish.api

+from openpype.pipeline import legacy_io
+from openpype.hosts.blender.api import workio
+
+
+class SaveWorkfileAction(pyblish.api.Action):
+    """Save Workfile."""
+    label = "Save Workfile"
+    on = "failed"
+    icon = "save"
+
+    def process(self, context, plugin):
+        bpy.ops.wm.avalon_workfiles()
+

 class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
@@ -8,12 +21,52 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):

     order = pyblish.api.CollectorOrder - 0.5
     label = "Blender Current File"
-    hosts = ['blender']
+    hosts = ["blender"]
+    actions = [SaveWorkfileAction]

     def process(self, context):
         """Inject the current working file"""
-        current_file = bpy.data.filepath
-        context.data['currentFile'] = current_file
+        current_file = workio.current_file()

-        assert current_file != '', "Current file is empty. " \
-            "Save the file before continuing."
+        context.data["currentFile"] = current_file
+
+        assert current_file, (
+            "Current file is empty. Save the file before continuing."
+ ) + + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = "workfile" + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": "workfile", + "families": ["workfile"], + "setMembers": [current_file], + "frameStart": bpy.context.scene.frame_start, + "frameEnd": bpy.context.scene.frame_end, + }) + + data["representations"] = [{ + "name": ext.lstrip("."), + "ext": ext.lstrip("."), + "files": file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info("Collected instance: {}".format(file)) + self.log.info("Scene path: {}".format(current_file)) + self.log.info("staging Dir: {}".format(folder)) + self.log.info("subset: {}".format(subset)) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index a26a92f7e4..1cab9d225b 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -2,12 +2,12 @@ import os import bpy -from openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -class ExtractABC(api.Extractor): +class ExtractABC(publish.Extractor): """Extract as ABC.""" label = "Extract ABC" diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py index 9add633f05..6a001b6f65 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend.py @@ -2,10 +2,10 @@ import os import bpy -import openpype.api +from openpype.pipeline import publish -class ExtractBlend(openpype.api.Extractor): +class ExtractBlend(publish.Extractor): """Extract a blend file.""" label = "Extract Blend" diff --git a/openpype/hosts/blender/plugins/publish/extract_blend_animation.py b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py index 4917223331..477411b73d 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py @@ -2,10 +2,10 @@ import os import bpy -import openpype.api +from openpype.pipeline import publish -class ExtractBlendAnimation(openpype.api.Extractor): +class ExtractBlendAnimation(publish.Extractor): """Extract a blend file.""" label = "Extract Blend" diff --git a/openpype/hosts/blender/plugins/publish/extract_camera.py b/openpype/hosts/blender/plugins/publish/extract_camera.py index b2c7611b58..9fd181825c 100644 --- a/openpype/hosts/blender/plugins/publish/extract_camera.py +++ b/openpype/hosts/blender/plugins/publish/extract_camera.py @@ -2,11 +2,11 @@ import os import bpy -from openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -class ExtractCamera(api.Extractor): +class ExtractCamera(publish.Extractor): """Extract as the camera as FBX.""" label = "Extract Camera" diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx.py b/openpype/hosts/blender/plugins/publish/extract_fbx.py index 3ac66f33a4..0ad797c226 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx.py @@ -2,12 +2,12 @@ import os import bpy -from 
openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -class ExtractFBX(api.Extractor): +class ExtractFBX(publish.Extractor): """Extract as FBX.""" label = "Extract FBX" diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 4b4a92932a..062b42e99d 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -5,12 +5,12 @@ import bpy import bpy_extras import bpy_extras.anim_utils -from openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -class ExtractAnimationFBX(api.Extractor): +class ExtractAnimationFBX(publish.Extractor): """Extract as animation.""" label = "Extract FBX" diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index 8502c6fbd4..f2d04f1178 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -6,12 +6,12 @@ import bpy_extras import bpy_extras.anim_utils from openpype.client import get_representation_by_name +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -import openpype.api -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" diff --git a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py index 39b9b67511..84b9dd1a6e 100644 --- a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py +++ b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py @@ -1,9 +1,11 @@ from typing import List -import mathutils +import bpy import pyblish.api + import openpype.hosts.blender.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): @@ -14,21 +16,18 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): in Unreal and Blender. 
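The extractor migration is mechanical: subclass `publish.Extractor` from `openpype.pipeline` instead of `openpype.api.Extractor`. A minimal sketch of the new shape (the plugin name and text payload are invented for illustration):

```python
import os

from openpype.pipeline import publish


class ExtractExample(publish.Extractor):
    """Minimal extractor skeleton using the new import location."""

    label = "Extract Example"
    hosts = ["blender"]
    families = ["model"]

    def process(self, instance):
        # staging_dir() is provided by the Extractor base class
        staging_dir = self.staging_dir(instance)
        filename = "{}.txt".format(instance.name)

        with open(os.path.join(staging_dir, filename), "w") as f:
            f.write("example payload")

        representation = {
            "name": "txt",
            "ext": "txt",
            "files": filename,
            "stagingDir": staging_dir,
        }
        instance.data.setdefault("representations", []).append(representation)
```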
""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["camera"] - category = "geometry" version = (0, 1, 0) label = "Zero Keyframe" actions = [openpype.hosts.blender.api.action.SelectInvalidAction] - _identity = mathutils.Matrix() - - @classmethod - def get_invalid(cls, instance) -> List: + @staticmethod + def get_invalid(instance) -> List: invalid = [] - for obj in [obj for obj in instance]: - if obj.type == "CAMERA": + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA": if obj.animation_data and obj.animation_data.action: action = obj.animation_data.action frames_set = set() @@ -45,4 +44,5 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Object found in instance is not in Object Mode: {invalid}") + f"Camera must have a keyframe at frame 0: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index 1c73476fc8..cee855671d 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -3,13 +3,15 @@ from typing import List import bpy import pyblish.api + +from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.blender.api.action class ValidateMeshHasUvs(pyblish.api.InstancePlugin): """Validate that the current mesh has UV's.""" - order = pyblish.api.ValidatorOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" @@ -25,7 +27,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): for uv_layer in obj.data.uv_layers: for polygon in obj.data.polygons: for loop_index in polygon.loop_indices: - if not uv_layer.data[loop_index].uv: + if ( + loop_index >= len(uv_layer.data) + or not uv_layer.data[loop_index].uv + ): return False return True @@ -33,20 +38,20 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance) -> List: invalid = [] - # TODO (jasper): only check objects in the collection that will be published? - for obj in [ - obj for obj in instance]: - try: - if obj.type == 'MESH': - # Make sure we are in object mode. - bpy.ops.object.mode_set(mode='OBJECT') - if not cls.has_uvs(obj): - invalid.append(obj) - except: - continue + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': + if obj.mode != "OBJECT": + cls.log.warning( + f"Mesh object {obj.name} should be in 'OBJECT' mode" + " to be properly checked." 
+ ) + if not cls.has_uvs(obj): + invalid.append(obj) return invalid def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}") + raise RuntimeError( + f"Meshes found in instance without valid UV's: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 00159a2d36..45ac08811d 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -3,28 +3,28 @@ from typing import List import bpy import pyblish.api + +from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.blender.api.action class ValidateMeshNoNegativeScale(pyblish.api.Validator): """Ensure that meshes don't have a negative scale.""" - order = pyblish.api.ValidatorOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] + category = "geometry" label = "Mesh No Negative Scale" actions = [openpype.hosts.blender.api.action.SelectInvalidAction] @staticmethod def get_invalid(instance) -> List: invalid = [] - # TODO (jasper): only check objects in the collection that will be published? - for obj in [ - obj for obj in bpy.data.objects if obj.type == 'MESH' - ]: - if any(v < 0 for v in obj.scale): - invalid.append(obj) - + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': + if any(v < 0 for v in obj.scale): + invalid.append(obj) return invalid def process(self, instance): diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py index 261ff864d5..f5dc9fdd5c 100644 --- a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -1,7 +1,11 @@ from typing import List +import bpy + import pyblish.api + import openpype.hosts.blender.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoColonsInName(pyblish.api.InstancePlugin): @@ -12,20 +16,20 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model", "rig"] version = (0, 1, 0) label = "No Colons in names" actions = [openpype.hosts.blender.api.action.SelectInvalidAction] - @classmethod - def get_invalid(cls, instance) -> List: + @staticmethod + def get_invalid(instance) -> List: invalid = [] - for obj in [obj for obj in instance]: + for obj in instance: if ':' in obj.name: invalid.append(obj) - if obj.type == 'ARMATURE': + if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE': for bone in obj.data.bones: if ':' in bone.name: invalid.append(obj) @@ -36,4 +40,5 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Objects found with colon in name: {invalid}") + f"Objects found with colon in name: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_object_mode.py b/openpype/hosts/blender/plugins/publish/validate_object_mode.py index 90ef0b7c41..ac60e00f89 100644 --- a/openpype/hosts/blender/plugins/publish/validate_object_mode.py +++ b/openpype/hosts/blender/plugins/publish/validate_object_mode.py @@ -1,5 +1,7 @@ from typing import List 
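The validators in this change converge on one shape: class-level ordering via `ValidateContentsOrder`, a `get_invalid()` classifier that type-checks instance members before touching Blender attributes, and a `process()` that raises with the offender list. A skeleton sketch (the negative-scale check is borrowed from the plugin above; the class name is illustrative):

```python
from typing import List

import bpy
import pyblish.api

from openpype.pipeline.publish import ValidateContentsOrder


class ValidateExample(pyblish.api.InstancePlugin):
    """Skeleton of the validator shape shared by the Blender plugins."""

    order = ValidateContentsOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Example Validator"

    @staticmethod
    def get_invalid(instance) -> List:
        invalid = []
        for obj in instance:
            # Instances may hold non-object members, so type-check first
            if isinstance(obj, bpy.types.Object) and obj.type == "MESH":
                if any(v < 0 for v in obj.scale):
                    invalid.append(obj)
        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Objects failed validation: {}".format(invalid))
```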
+import bpy + + import pyblish.api import openpype.hosts.blender.api.action @@ -10,26 +12,21 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder - 0.01 hosts = ["blender"] families = ["model", "rig", "layout"] - category = "geometry" label = "Validate Object Mode" actions = [openpype.hosts.blender.api.action.SelectInvalidAction] optional = False - @classmethod - def get_invalid(cls, instance) -> List: + @staticmethod + def get_invalid(instance) -> List: invalid = [] - for obj in [obj for obj in instance]: - try: - if obj.type == 'MESH' or obj.type == 'ARMATURE': - # Check if the object is in object mode. - if not obj.mode == 'OBJECT': - invalid.append(obj) - except Exception: - continue + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.mode != "OBJECT": + invalid.append(obj) return invalid def process(self, instance): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Object found in instance is not in Object Mode: {invalid}") + f"Object found in instance is not in Object Mode: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py index 7456dbc423..742826d3d9 100644 --- a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -1,9 +1,12 @@ from typing import List import mathutils +import bpy import pyblish.api + import openpype.hosts.blender.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateTransformZero(pyblish.api.InstancePlugin): @@ -15,10 +18,9 @@ """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] - category = "geometry" version = (0, 1, 0) label = "Transform Zero" actions = [openpype.hosts.blender.api.action.SelectInvalidAction] @@ -28,8 +30,11 @@ @classmethod def get_invalid(cls, instance) -> List: invalid = [] - for obj in [obj for obj in instance]: - if obj.matrix_basis != cls._identity: + for obj in instance: + if ( + isinstance(obj, bpy.types.Object) + and obj.matrix_basis != cls._identity + ): invalid.append(obj) return invalid @@ -37,4 +42,6 @@ invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Object found in instance is not in Object Mode: {invalid}") + "Object found in instance does not" + f" have zero transform: {invalid}" + ) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 8c7b3a2e74..88fc11cafb 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -6,15 +6,14 @@ import argparse import pyblish.api import pyblish.util -from openpype.api import Logger -import openpype import openpype.hosts.celaction +from openpype.lib import Logger from openpype.hosts.celaction import api as celaction from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins -log = Logger().get_logger("Celaction_cli_publisher") +log = Logger.get_logger("Celaction_cli_publisher") publish_host = "celaction" diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py deleted file mode 100644 index c6e3bf2c03..0000000000 --- 
a/openpype/hosts/celaction/plugins/publish/collect_audio.py +++ /dev/null @@ -1,113 +0,0 @@ -import os -import collections -from pprint import pformat - -import pyblish.api - -from openpype.client import ( - get_subsets, - get_last_versions, - get_representations -) -from openpype.pipeline import legacy_io - - -class AppendCelactionAudio(pyblish.api.ContextPlugin): - - label = "Colect Audio for publishing" - order = pyblish.api.CollectorOrder + 0.1 - - def process(self, context): - self.log.info('Collecting Audio Data') - asset_doc = context.data["assetEntity"] - - # get all available representations - subsets = self.get_subsets( - asset_doc, - representations=["audio", "wav"] - ) - self.log.info(f"subsets is: {pformat(subsets)}") - - if not subsets.get("audioMain"): - raise AttributeError("`audioMain` subset does not exist") - - reprs = subsets.get("audioMain", {}).get("representations", []) - self.log.info(f"reprs is: {pformat(reprs)}") - - repr = next((r for r in reprs), None) - if not repr: - raise "Missing `audioMain` representation" - self.log.info(f"representation is: {repr}") - - audio_file = repr.get('data', {}).get('path', "") - - if os.path.exists(audio_file): - context.data["audioFile"] = audio_file - self.log.info( - 'audio_file: {}, has been added to context'.format(audio_file)) - else: - self.log.warning("Couldn't find any audio file on Ftrack.") - - def get_subsets(self, asset_doc, representations): - """ - Query subsets with filter on name. - - The method will return all found subsets and its defined version - and subsets. Version could be specified with number. Representation - can be filtered. - - Arguments: - asset_doct (dict): Asset (shot) mongo document - representations (list): list for all representations - - Returns: - dict: subsets with version and representations in keys - """ - - # Query all subsets for asset - project_name = legacy_io.active_project() - subset_docs = get_subsets( - project_name, asset_ids=[asset_doc["_id"]], fields=["_id"] - ) - # Collect all subset ids - subset_ids = [ - subset_doc["_id"] - for subset_doc in subset_docs - ] - - # Check if we found anything - assert subset_ids, ( - "No subsets found. Check correct filter. 
" - "Try this for start `r'.*'`: asset: `{}`" - ).format(asset_doc["name"]) - - last_versions_by_subset_id = get_last_versions( - project_name, subset_ids, fields=["_id", "parent"] - ) - - version_docs_by_id = {} - for version_doc in last_versions_by_subset_id.values(): - version_docs_by_id[version_doc["_id"]] = version_doc - - repre_docs = get_representations( - project_name, - version_ids=version_docs_by_id.keys(), - representation_names=representations - ) - repre_docs_by_version_id = collections.defaultdict(list) - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - repre_docs_by_version_id[version_id].append(repre_doc) - - output_dict = {} - for version_id, repre_docs in repre_docs_by_version_id.items(): - version_doc = version_docs_by_id[version_id] - subset_id = version_doc["parent"] - subset_doc = last_versions_by_subset_id[subset_id] - # Store queried docs by subset name - output_dict[subset_doc["name"]] = { - "representations": repre_docs, - "version": version_doc - } - - return output_dict diff --git a/openpype/hosts/flame/__init__.py b/openpype/hosts/flame/__init__.py index f839357147..b45f107747 100644 --- a/openpype/hosts/flame/__init__.py +++ b/openpype/hosts/flame/__init__.py @@ -1,22 +1,10 @@ -import os - -HOST_DIR = os.path.dirname( - os.path.abspath(__file__) +from .addon import ( + HOST_DIR, + FlameAddon, ) -def add_implementation_envs(env, _app): - # Add requirements to DL_PYTHON_HOOK_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - - env["DL_PYTHON_HOOK_PATH"] = os.path.join( - pype_root, "openpype", "hosts", "flame", "startup") - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "HOST_DIR", + "FlameAddon", +) diff --git a/openpype/hosts/flame/addon.py b/openpype/hosts/flame/addon.py new file mode 100644 index 0000000000..5a34413bb0 --- /dev/null +++ b/openpype/hosts/flame/addon.py @@ -0,0 +1,36 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class FlameAddon(OpenPypeModule, IHostAddon): + name = "flame" + host_name = "flame" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to DL_PYTHON_HOOK_PATH + env["DL_PYTHON_HOOK_PATH"] = os.path.join(HOST_DIR, "startup") + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(HOST_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".otoc"] diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 2c461e5f16..c00ee958b6 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -30,7 +30,8 @@ from .lib import ( maintained_temp_file_path, get_clip_segment, get_batch_group_from_desktop, - MediaInfoFile + MediaInfoFile, + TimeEffectMetadata ) from .utils import ( setup, @@ -50,7 +51,8 @@ from .pipeline import ( ) from .menu import ( FlameMenuProjectConnect, - FlameMenuTimeline + FlameMenuTimeline, + FlameMenuUniversal ) from .plugin import ( 
Creator, @@ -107,6 +109,7 @@ __all__ = [ "get_clip_segment", "get_batch_group_from_desktop", "MediaInfoFile", + "TimeEffectMetadata", # pipeline "install", @@ -129,6 +132,7 @@ __all__ = [ # menu "FlameMenuProjectConnect", "FlameMenuTimeline", + "FlameMenuUniversal", # plugin "Creator", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index d59308ad6c..6aca5c5ce6 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -5,12 +5,16 @@ import json import pickle import clique import tempfile +import traceback import itertools import contextlib import xml.etree.cElementTree as cET -from copy import deepcopy +from copy import deepcopy, copy from xml.etree import ElementTree as ET from pprint import pformat + +from openpype.lib import Logger, run_subprocess + from .constants import ( MARKER_COLOR, MARKER_DURATION, @@ -19,9 +23,7 @@ from .constants import ( MARKER_PUBLISH_DEFAULT ) -import openpype.api as openpype - -log = openpype.Logger.get_logger(__name__) +log = Logger.get_logger(__name__) FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]") @@ -266,7 +268,7 @@ def get_current_sequence(selection): def rescan_hooks(): import flame try: - flame.execute_shortcut('Rescan Python Hooks') + flame.execute_shortcut("Rescan Python Hooks") except Exception: pass @@ -765,11 +767,11 @@ class MediaInfoFile(object): _drop_mode = None _file_pattern = None - def __init__(self, path, **kwargs): + def __init__(self, path, logger=None): # replace log if any - if kwargs.get("logger"): - self.log = kwargs["logger"] + if logger: + self.log = logger # test if `dl_get_media_info` paht exists self._validate_media_script_path() @@ -1015,7 +1017,7 @@ class MediaInfoFile(object): try: # execute creation of clip xml template data - openpype.run_subprocess(cmd_args) + run_subprocess(cmd_args) except TypeError as error: raise TypeError( "Error creating `{}` due: {}".format(fpath, error)) @@ -1082,21 +1084,21 @@ class MediaInfoFile(object): xml_data (ET.Element): clip data """ try: - for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feed'): + for out_track in xml_data.iter("track"): + for out_feed in out_track.iter("feed"): # start frame out_feed_nb_ticks_obj = out_feed.find( - 'startTimecode/nbTicks') + "startTimecode/nbTicks") self.start_frame = out_feed_nb_ticks_obj.text # fps out_feed_fps_obj = out_feed.find( - 'startTimecode/rate') + "startTimecode/rate") self.fps = out_feed_fps_obj.text # drop frame mode out_feed_drop_mode_obj = out_feed.find( - 'startTimecode/dropMode') + "startTimecode/dropMode") self.drop_mode = out_feed_drop_mode_obj.text break except Exception as msg: @@ -1118,8 +1120,153 @@ class MediaInfoFile(object): tree = cET.ElementTree(xml_element_data) tree.write( fpath, xml_declaration=True, - method='xml', encoding='UTF-8' + method="xml", encoding="UTF-8" ) except IOError as error: raise IOError( "Not able to write data to file: {}".format(error)) + + +class TimeEffectMetadata(object): + log = log + _data = {} + _retime_modes = { + 0: "speed", + 1: "timewarp", + 2: "duration" + } + + def __init__(self, segment, logger=None): + if logger: + self.log = logger + + self._data = self._get_metadata(segment) + + @property + def data(self): + """ Returns timewarp effect data + + Returns: + dict: retime data + """ + return self._data + + def _get_metadata(self, segment): + effects = segment.effects or [] + for effect in effects: + if effect.type == "Timewarp": + with maintained_temp_file_path(".timewarp_node") as tmp_path: + 
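`maintained_temp_file_path` is only imported here. As an illustration of how such a helper is typically built (this is a guess for readability, not the actual implementation in `openpype.hosts.flame.api.lib`), it is a context manager around `tempfile` that cleans up after itself:

```python
import os
import contextlib
import tempfile


@contextlib.contextmanager
def maintained_temp_file_path(suffix=""):
    """Yield a temp file path and remove the file afterwards.

    Illustrative sketch only; the real helper may differ in details.
    """
    path = None
    try:
        fd, path = tempfile.mkstemp(suffix=suffix)
        os.close(fd)
        yield path
    finally:
        if path and os.path.exists(path):
            os.remove(path)


# Usage mirroring TimeEffectMetadata: save a setup, read it back
with maintained_temp_file_path(".timewarp_node") as tmp_path:
    with open(tmp_path, "w") as f:
        f.write("<Setup/>")
    print(open(tmp_path).read())
```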
self.log.info("Temp File: {}".format(tmp_path)) + effect.save_setup(tmp_path) + return self._get_attributes_from_xml(tmp_path) + + return {} + + def _get_attributes_from_xml(self, tmp_path): + with open(tmp_path, "r") as tw_setup_file: + tw_setup_string = tw_setup_file.read() + tw_setup_file.close() + + tw_setup_xml = ET.fromstring(tw_setup_string) + tw_setup = self._dictify(tw_setup_xml) + # pprint(tw_setup) + try: + tw_setup_state = tw_setup["Setup"]["State"][0] + mode = int( + tw_setup_state["TW_RetimerMode"][0]["_text"] + ) + r_data = { + "type": self._retime_modes[mode], + "effectStart": int( + tw_setup["Setup"]["Base"][0]["Range"][0]["Start"]), + "effectEnd": int( + tw_setup["Setup"]["Base"][0]["Range"][0]["End"]) + } + + if mode == 0: # speed + r_data[self._retime_modes[mode]] = float( + tw_setup_state["TW_Speed"] + [0]["Channel"][0]["Value"][0]["_text"] + ) / 100 + elif mode == 1: # timewarp + print("timing") + r_data[self._retime_modes[mode]] = self._get_anim_keys( + tw_setup_state["TW_Timing"] + ) + elif mode == 2: # duration + r_data[self._retime_modes[mode]] = { + "start": { + "source": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][0]["Value"][0]["_text"] + ), + "timeline": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][0]["Frame"][0]["_text"] + ) + }, + "end": { + "source": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][1]["Value"][0]["_text"] + ), + "timeline": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][1]["Frame"][0]["_text"] + ) + } + } + except Exception: + lines = traceback.format_exception(*sys.exc_info()) + self.log.error("\n".join(lines)) + return + + return r_data + + def _get_anim_keys(self, setup_cat, index=None): + return_data = { + "extrapolation": ( + setup_cat[0]["Channel"][0]["Extrap"][0]["_text"] + ), + "animKeys": [] + } + for key in setup_cat[0]["Channel"][0]["KFrames"][0]["Key"]: + if index and int(key["Index"]) != index: + continue + key_data = { + "source": float(key["Value"][0]["_text"]), + "timeline": float(key["Frame"][0]["_text"]), + "index": int(key["Index"]), + "curveMode": key["CurveMode"][0]["_text"], + "curveOrder": key["CurveOrder"][0]["_text"] + } + if key.get("TangentMode"): + key_data["tangentMode"] = key["TangentMode"][0]["_text"] + + return_data["animKeys"].append(key_data) + + return return_data + + def _dictify(self, xml_, root=True): + """ Convert xml object to dictionary + + Args: + xml_ (xml.etree.ElementTree.Element): xml data + root (bool, optional): is root available. Defaults to True. 
+ + Returns: + dict: dictionarized xml + """ + + if root: + return {xml_.tag: self._dictify(xml_, False)} + + d = copy(xml_.attrib) + if xml_.text: + d["_text"] = xml_.text + + for x in xml_.findall("./*"): + if x.tag not in d: + d[x.tag] = [] + d[x.tag].append(self._dictify(x, False)) + return d diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index 7f1a6a24e2..f72a352bba 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -201,3 +201,53 @@ class FlameMenuTimeline(_FlameMenuApp): if self.flame: self.flame.execute_shortcut('Rescan Python Hooks') self.log.info('Rescan Python Hooks') + + +class FlameMenuUniversal(_FlameMenuApp): + + # flameMenuProjectconnect app takes care of the preferences dialog as well + + def __init__(self, framework): + _FlameMenuApp.__init__(self, framework) + + def __getattr__(self, name): + def method(*args, **kwargs): + project = self.dynamic_menu_data.get(name) + if project: + self.link_project(project) + return method + + def build_menu(self): + if not self.flame: + return [] + + menu = deepcopy(self.menu) + + menu['actions'].append({ + "name": "Load...", + "execute": lambda x: self.tools_helper.show_loader() + }) + menu['actions'].append({ + "name": "Manage...", + "execute": lambda x: self.tools_helper.show_scene_inventory() + }) + menu['actions'].append({ + "name": "Library...", + "execute": lambda x: self.tools_helper.show_library_loader() + }) + return menu + + def refresh(self, *args, **kwargs): + self.rescan() + + def rescan(self, *args, **kwargs): + if not self.flame: + try: + import flame + self.flame = flame + except ImportError: + self.flame = None + + if self.flame: + self.flame.execute_shortcut('Rescan Python Hooks') + self.log.info('Rescan Python Hooks') diff --git a/openpype/hosts/flame/api/pipeline.py b/openpype/hosts/flame/api/pipeline.py index da44be1b15..3a23389961 100644 --- a/openpype/hosts/flame/api/pipeline.py +++ b/openpype/hosts/flame/api/pipeline.py @@ -5,7 +5,7 @@ import os import contextlib from pyblish import api as pyblish -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -90,8 +90,7 @@ def containerise(flame_clip_segment, def ls(): """List available containers. """ - # TODO: ls - pass + return [] def parse_container(tl_segment, validate=True): @@ -107,6 +106,7 @@ def update_container(tl_segment, data=None): # TODO: update_container pass + def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle node passthrough states on instance toggles.""" diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index efbabb6a55..092ce9d106 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -6,16 +6,17 @@ from xml.etree import ElementTree as ET from Qt import QtCore, QtWidgets -import openpype.api as openpype import qargparse from openpype import style +from openpype.settings import get_current_project_settings +from openpype.lib import Logger from openpype.pipeline import LegacyCreator, LoaderPlugin from . import constants from . import lib as flib from . 
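Example of what `_dictify` produces for a small element tree (a standalone copy of the converter, for demonstration only):

```python
import xml.etree.ElementTree as ET
from copy import copy


def dictify(xml_, root=True):
    """Standalone copy of the converter above, for demonstration."""
    if root:
        return {xml_.tag: dictify(xml_, False)}
    d = copy(xml_.attrib)
    if xml_.text:
        d["_text"] = xml_.text
    for x in xml_.findall("./*"):
        d.setdefault(x.tag, []).append(dictify(x, False))
    return d


element = ET.fromstring("<Setup><State>1</State><State>2</State></Setup>")
print(dictify(element))
# {'Setup': {'State': [{'_text': '1'}, {'_text': '2'}]}}
```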
import pipeline as fpipeline -log = openpype.Logger.get_logger(__name__) +log = Logger.get_logger(__name__) class CreatorWidget(QtWidgets.QDialog): @@ -305,7 +306,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.presets = openpype.get_current_project_settings()[ + self.presets = get_current_project_settings()[ "flame"]["create"].get(self.__class__.__name__, {}) # adding basic current context flame objects @@ -361,6 +362,8 @@ class PublishableClip: index_from_segment_default = False use_shot_name_default = False include_handles_default = False + retimed_handles_default = True + retimed_framerange_default = True def __init__(self, segment, **kwargs): self.rename_index = kwargs["rename_index"] @@ -496,6 +499,14 @@ class PublishableClip: "audio", {}).get("value") or False self.include_handles = self.ui_inputs.get( "includeHandles", {}).get("value") or self.include_handles_default + self.retimed_handles = ( + self.ui_inputs.get("retimedHandles", {}).get("value") + or self.retimed_handles_default + ) + self.retimed_framerange = ( + self.ui_inputs.get("retimedFramerange", {}).get("value") + or self.retimed_framerange_default + ) # build subset name from layer name if self.subset_name == "[ track name ]": @@ -668,6 +679,7 @@ class ClipLoader(LoaderPlugin): `update` logic. """ + log = log options = [ qargparse.Boolean( @@ -684,16 +696,20 @@ class OpenClipSolver(flib.MediaInfoFile): log = log - def __init__(self, openclip_file_path, feed_data): + def __init__(self, openclip_file_path, feed_data, logger=None): self.out_file = openclip_file_path + # replace log if any + if logger: + self.log = logger + # new feed variables: feed_path = feed_data.pop("path") # initialize parent class super(OpenClipSolver, self).__init__( feed_path, - **feed_data + logger=logger ) # get other metadata @@ -741,17 +757,18 @@ class OpenClipSolver(flib.MediaInfoFile): self.log.info("Building new openClip") self.log.debug(">> self.clip_data: {}".format(self.clip_data)) - # clip data comming from MediaInfoFile - tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') - tmp_xml_feeds.set('currentVersion', self.feed_version_name) - for tmp_feed in tmp_xml_feeds: - tmp_feed.set('vuid', self.feed_version_name) + for tmp_xml_track in self.clip_data.iter("track"): + tmp_xml_feeds = tmp_xml_track.find('feeds') + tmp_xml_feeds.set('currentVersion', self.feed_version_name) - # add colorspace if any is set - if self.feed_colorspace: - self._add_colorspace(tmp_feed, self.feed_colorspace) + for tmp_feed in tmp_xml_track.iter("feed"): + tmp_feed.set('vuid', self.feed_version_name) - self._clear_handler(tmp_feed) + # add colorspace if any is set + if self.feed_colorspace: + self._add_colorspace(tmp_feed, self.feed_colorspace) + + self._clear_handler(tmp_feed) tmp_xml_versions_obj = self.clip_data.find('versions') tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) @@ -764,6 +781,17 @@ class OpenClipSolver(flib.MediaInfoFile): self.write_clip_data_to_file(self.out_file, self.clip_data) + def _get_xml_track_obj_by_uid(self, xml_data, uid): + # loop all tracks of input xml data + for xml_track in xml_data.iter("track"): + track_uid = xml_track.get("uid") + self.log.debug( + ">> track_uid:uid: {}:{}".format(track_uid, uid)) + + # get matching uids + if uid == track_uid: + return xml_track + def _update_open_clip(self): self.log.info("Updating openClip ..") @@ -773,52 +801,81 @@ class OpenClipSolver(flib.MediaInfoFile): self.log.debug(">> out_xml: 
{}".format(out_xml)) self.log.debug(">> self.clip_data: {}".format(self.clip_data)) - # Get new feed from tmp file - tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') + # loop tmp tracks + updated_any = False + for tmp_xml_track in self.clip_data.iter("track"): + # get tmp track uid + tmp_track_uid = tmp_xml_track.get("uid") + self.log.debug(">> tmp_track_uid: {}".format(tmp_track_uid)) - self._clear_handler(tmp_xml_feed) + # get out data track by uid + out_track_element = self._get_xml_track_obj_by_uid( + out_xml, tmp_track_uid) + self.log.debug( + ">> out_track_element: {}".format(out_track_element)) - # update fps from MediaInfoFile class - if self.fps: - tmp_feed_fps_obj = tmp_xml_feed.find( - "startTimecode/rate") - tmp_feed_fps_obj.text = str(self.fps) + # loop tmp feeds + for tmp_xml_feed in tmp_xml_track.iter("feed"): + new_path_obj = tmp_xml_feed.find( + "spans/span/path") + new_path = new_path_obj.text - # update start_frame from MediaInfoFile class - if self.start_frame: - tmp_feed_nb_ticks_obj = tmp_xml_feed.find( - "startTimecode/nbTicks") - tmp_feed_nb_ticks_obj.text = str(self.start_frame) + # check if feed path already exists in track's feeds + if ( + out_track_element is not None + and self._feed_exists(out_track_element, new_path) + ): + continue - # update drop_mode from MediaInfoFile class - if self.drop_mode: - tmp_feed_drop_mode_obj = tmp_xml_feed.find( - "startTimecode/dropMode") - tmp_feed_drop_mode_obj.text = str(self.drop_mode) + # rename versions on feeds + tmp_xml_feed.set('vuid', self.feed_version_name) + self._clear_handler(tmp_xml_feed) - new_path_obj = tmp_xml_feed.find( - "spans/span/path") - new_path = new_path_obj.text + # update fps from MediaInfoFile class + if self.fps is not None: + tmp_feed_fps_obj = tmp_xml_feed.find( + "startTimecode/rate") + tmp_feed_fps_obj.text = str(self.fps) - feed_added = False - if not self._feed_exists(out_xml, new_path): - tmp_xml_feed.set('vuid', self.feed_version_name) - # Append new temp file feed to .clip source out xml - out_track = out_xml.find("tracks/track") - # add colorspace if any is set - if self.feed_colorspace: - self._add_colorspace(tmp_xml_feed, self.feed_colorspace) + # update start_frame from MediaInfoFile class + if self.start_frame is not None: + tmp_feed_nb_ticks_obj = tmp_xml_feed.find( + "startTimecode/nbTicks") + tmp_feed_nb_ticks_obj.text = str(self.start_frame) - out_feeds = out_track.find('feeds') - out_feeds.set('currentVersion', self.feed_version_name) - out_feeds.append(tmp_xml_feed) + # update drop_mode from MediaInfoFile class + if self.drop_mode is not None: + tmp_feed_drop_mode_obj = tmp_xml_feed.find( + "startTimecode/dropMode") + tmp_feed_drop_mode_obj.text = str(self.drop_mode) - self.log.info( - "Appending new feed: {}".format( - self.feed_version_name)) - feed_added = True + # add colorspace if any is set + if self.feed_colorspace is not None: + self._add_colorspace(tmp_xml_feed, self.feed_colorspace) - if feed_added: + # then append/update feed to correct track in output + if out_track_element: + self.log.debug("updating track element ..") + # update already present track + out_feeds = out_track_element.find('feeds') + out_feeds.set('currentVersion', self.feed_version_name) + out_feeds.append(tmp_xml_feed) + + self.log.info( + "Appending new feed: {}".format( + self.feed_version_name)) + else: + self.log.debug("adding new track element ..") + # create new track as it doesnt exists yet + # set current version to feeds on tmp + tmp_xml_feeds = tmp_xml_track.find('feeds') + 
tmp_xml_feeds.set('currentVersion', self.feed_version_name) + out_tracks = out_xml.find("tracks") + out_tracks.append(tmp_xml_track) + + updated_any = True + + if updated_any: # Append vUID to versions out_xml_versions_obj = out_xml.find('versions') out_xml_versions_obj.set( diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py index a29d6be695..7e50c2b23e 100644 --- a/openpype/hosts/flame/api/render_utils.py +++ b/openpype/hosts/flame/api/render_utils.py @@ -1,6 +1,6 @@ import os from xml.etree import ElementTree as ET -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/flame/api/utils.py b/openpype/hosts/flame/api/utils.py index 2dfdfa8f48..fb8bdee42d 100644 --- a/openpype/hosts/flame/api/utils.py +++ b/openpype/hosts/flame/api/utils.py @@ -4,7 +4,7 @@ Flame utils for syncing scripts import os import shutil -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/flame/api/workio.py b/openpype/hosts/flame/api/workio.py index 0c96c0752a..e49321c75a 100644 --- a/openpype/hosts/flame/api/workio.py +++ b/openpype/hosts/flame/api/workio.py @@ -1,7 +1,7 @@ """Host API required Work Files tool""" import os -from openpype.api import Logger +from openpype.lib import Logger # from .. import ( # get_project_manager, # get_current_project diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index ad2b0dc897..713daf1031 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -3,16 +3,17 @@ import json import tempfile import contextlib import socket +from pprint import pformat + from openpype.lib import ( PreLaunchHook, - get_openpype_username + get_openpype_username, + run_subprocess, ) from openpype.lib.applications import ( ApplicationLaunchFailed ) from openpype.hosts import flame as opflame -import openpype -from pprint import pformat class FlamePrelaunch(PreLaunchHook): @@ -22,6 +23,7 @@ class FlamePrelaunch(PreLaunchHook): in environment var FLAME_SCRIPT_DIR. """ app_groups = ["flame"] + permissions = 0o777 wtc_script_path = os.path.join( opflame.HOST_DIR, "api", "scripts", "wiretap_com.py") @@ -38,19 +40,12 @@ class FlamePrelaunch(PreLaunchHook): """Hook entry method.""" project_doc = self.data["project_doc"] project_name = project_doc["name"] + volume_name = _env.get("FLAME_WIRETAP_VOLUME") # get image io - project_anatomy = self.data["anatomy"] + project_settings = self.data["project_settings"] - # make sure anatomy settings are having flame key - if not project_anatomy["imageio"].get("flame"): - raise ApplicationLaunchFailed(( - "Anatomy project settings are missing `flame` key. 
" - "Please make sure you remove project overides on " - "Anatomy Image io") - ) - - imageio_flame = project_anatomy["imageio"]["flame"] + imageio_flame = project_settings["flame"]["imageio"] # get user name and host name user_name = get_openpype_username() @@ -81,7 +76,7 @@ class FlamePrelaunch(PreLaunchHook): data_to_script = { # from settings "host_name": _env.get("FLAME_WIRETAP_HOSTNAME") or hostname, - "volume_name": _env.get("FLAME_WIRETAP_VOLUME"), + "volume_name": volume_name, "group_name": _env.get("FLAME_WIRETAP_GROUP"), "color_policy": str(imageio_flame["project"]["colourPolicy"]), @@ -99,8 +94,40 @@ class FlamePrelaunch(PreLaunchHook): app_arguments = self._get_launch_arguments(data_to_script) + # fix project data permission issue + self._fix_permissions(project_name, volume_name) + self.launch_context.launch_args.extend(app_arguments) + def _fix_permissions(self, project_name, volume_name): + """Work around for project data permissions + + Reported issue: when project is created locally on one machine, + it is impossible to migrate it to other machine. Autodesk Flame + is crating some unmanagable files which needs to be opened to 0o777. + + Args: + project_name (str): project name + volume_name (str): studio volume + """ + dirs_to_modify = [ + "/usr/discreet/project/{}".format(project_name), + "/opt/Autodesk/clip/{}/{}.prj".format(volume_name, project_name), + "/usr/discreet/clip/{}/{}.prj".format(volume_name, project_name) + ] + + for dirtm in dirs_to_modify: + for root, dirs, files in os.walk(dirtm): + try: + for name in set(dirs) | set(files): + path = os.path.join(root, name) + st = os.stat(path) + if oct(st.st_mode) != self.permissions: + os.chmod(path, self.permissions) + + except OSError as exc: + self.log.warning("Not able to open files: {}".format(exc)) + def _get_flame_fps(self, fps_num): fps_table = { float(23.976): "23.976 fps", @@ -152,7 +179,7 @@ class FlamePrelaunch(PreLaunchHook): "env": self.launch_context.env } - openpype.api.run_subprocess(args, **process_kwargs) + run_subprocess(args, **process_kwargs) # process returned json file to pass launch args return_json_data = open(tmp_json_path).read() diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 1e4ef866ed..6d6b33d2a1 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -275,7 +275,7 @@ def create_otio_reference(clip_data, fps=None): def create_otio_clip(clip_data): - from openpype.hosts.flame.api import MediaInfoFile + from openpype.hosts.flame.api import MediaInfoFile, TimeEffectMetadata segment = clip_data["PySegment"] @@ -284,14 +284,31 @@ def create_otio_clip(clip_data): media_timecode_start = media_info.start_frame media_fps = media_info.fps + # Timewarp metadata + tw_data = TimeEffectMetadata(segment, logger=log).data + log.debug("__ tw_data: {}".format(tw_data)) + # define first frame - first_frame = media_timecode_start or utils.get_frame_from_filename( - clip_data["fpath"]) or 0 + file_first_frame = utils.get_frame_from_filename( + clip_data["fpath"]) + if file_first_frame: + file_first_frame = int(file_first_frame) + + first_frame = media_timecode_start or file_first_frame or 0 _clip_source_in = int(clip_data["source_in"]) _clip_source_out = int(clip_data["source_out"]) + _clip_record_in = clip_data["record_in"] + _clip_record_out = clip_data["record_out"] _clip_record_duration = int(clip_data["record_duration"]) + log.debug("_ file_first_frame: {}".format(file_first_frame)) + log.debug("_ 
first_frame: {}".format(first_frame)) + log.debug("_ _clip_source_in: {}".format(_clip_source_in)) + log.debug("_ _clip_source_out: {}".format(_clip_source_out)) + log.debug("_ _clip_record_in: {}".format(_clip_record_in)) + log.debug("_ _clip_record_out: {}".format(_clip_record_out)) + # first solve if the reverse timing speed = 1 if clip_data["source_in"] > clip_data["source_out"]: @@ -302,16 +319,28 @@ def create_otio_clip(clip_data): source_in = _clip_source_in - int(first_frame) source_out = _clip_source_out - int(first_frame) + log.debug("_ source_in: {}".format(source_in)) + log.debug("_ source_out: {}".format(source_out)) + + if file_first_frame: + log.debug("_ file_source_in: {}".format( + file_first_frame + source_in)) + log.debug("_ file_source_in: {}".format( + file_first_frame + source_out)) + source_duration = (source_out - source_in + 1) # secondly check if any change of speed if source_duration != _clip_record_duration: retime_speed = float(source_duration) / float(_clip_record_duration) - log.debug("_ retime_speed: {}".format(retime_speed)) + log.debug("_ calculated speed: {}".format(retime_speed)) speed *= retime_speed - log.debug("_ source_in: {}".format(source_in)) - log.debug("_ source_out: {}".format(source_out)) + # get speed from metadata if available + if tw_data.get("speed"): + speed = tw_data["speed"] + log.debug("_ metadata speed: {}".format(speed)) + log.debug("_ speed: {}".format(speed)) log.debug("_ source_duration: {}".format(source_duration)) log.debug("_ _clip_record_duration: {}".format(_clip_record_duration)) diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index fa239ea420..4fb041a4b2 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -23,10 +23,11 @@ class CreateShotClip(opfapi.Creator): # nested dictionary (only one level allowed # for sections and dict) for _k, _v in v["value"].items(): - if presets.get(_k): + if presets.get(_k) is not None: gui_inputs[k][ "value"][_k]["value"] = presets[_k] - if presets.get(k): + + if presets.get(k) is not None: gui_inputs[k]["value"] = presets[k] # open widget for plugins inputs @@ -276,6 +277,22 @@ class CreateShotClip(opfapi.Creator): "target": "tag", "toolTip": "By default handles are excluded", # noqa "order": 3 + }, + "retimedHandles": { + "value": True, + "type": "QCheckBox", + "label": "Retimed handles", + "target": "tag", + "toolTip": "By default handles are retimed.", # noqa + "order": 4 + }, + "retimedFramerange": { + "value": True, + "type": "QCheckBox", + "label": "Retimed framerange", + "target": "tag", + "toolTip": "By default framerange is retimed.", # noqa + "order": 5 } } } diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index b12f2f9690..0843dde76a 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -4,6 +4,7 @@ from pprint import pformat import openpype.hosts.flame.api as opfapi from openpype.lib import StringTemplate + class LoadClip(opfapi.ClipLoader): """Load a subset to timeline as clip @@ -60,8 +61,6 @@ class LoadClip(opfapi.ClipLoader): "path": self.fname.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), - "logger": self.log - } self.log.debug(pformat( loading_context @@ -69,7 +68,8 @@ class LoadClip(opfapi.ClipLoader): self.log.debug(openclip_path) # make openpype clip 
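A worked example of the implicit-retime detection above, with made-up frame numbers: when the source range covers more frames than the timeline slot, the ratio is the playback speed.

```python
# Made-up frames: 48 source frames cut into a 24-frame timeline slot
source_in, source_out = 1001, 1048
record_duration = 24

source_duration = source_out - source_in + 1     # 48
speed = float(source_duration) / float(record_duration)
print(speed)  # 2.0 -> the segment plays at double speed
```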
file - opfapi.OpenClipSolver(openclip_path, loading_context).make() + opfapi.OpenClipSolver( + openclip_path, loading_context, logger=self.log).make() # prepare Reel group in actual desktop opc = self._get_clip( diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index fb4a3dc6e9..3b049b861b 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -64,8 +64,6 @@ class LoadClipBatch(opfapi.ClipLoader): "path": self.fname.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), - "logger": self.log - } self.log.debug(pformat( loading_context @@ -73,7 +71,8 @@ class LoadClipBatch(opfapi.ClipLoader): self.log.debug(openclip_path) # make openpype clip file - opfapi.OpenClipSolver(openclip_path, loading_context).make() + opfapi.OpenClipSolver( + openclip_path, loading_context, logger=self.log).make() # prepare Reel group in actual desktop opc = self._get_clip( diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 992db62c75..76d48dded2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -131,6 +131,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "fps": self.fps, "workfileFrameStart": workfile_start, "sourceFirstFrame": int(first_frame), + "retimedHandles": marker_data.get("retimedHandles"), + "shotDurationFromSource": ( + not marker_data.get("retimedFramerange")), "path": file_path, "flameAddTasks": self.add_tasks, "tasks": { diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index 0a9b0db334..917041e053 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -1,9 +1,9 @@ import pyblish.api -import openpype.lib as oplib -from openpype.pipeline import legacy_io import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export +from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollecTimelineOTIO(pyblish.api.ContextPlugin): @@ -24,11 +24,14 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # create subset name - subset_name = oplib.get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, asset_doc, + context.data["projectName"], + context.data["hostName"], + project_settings=context.data["project_settings"] ) # adding otio timeline to context diff --git a/openpype/hosts/flame/plugins/publish/extract_otio_file.py b/openpype/hosts/flame/plugins/publish/extract_otio_file.py index 7dd75974fc..e5bfa42ce6 100644 --- a/openpype/hosts/flame/plugins/publish/extract_otio_file.py +++ b/openpype/hosts/flame/plugins/publish/extract_otio_file.py @@ -1,10 +1,10 @@ import os import pyblish.api -import openpype.api import opentimelineio as otio +from openpype.pipeline import publish -class ExtractOTIOFile(openpype.api.Extractor): +class ExtractOTIOFile(publish.Extractor): """ Extractor export OTIO file """ diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index d34f5d5854..d5294d61c2 
100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,18 +1,21 @@ import os import re import tempfile -from pprint import pformat from copy import deepcopy import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.flame import api as opfapi from openpype.hosts.flame.api import MediaInfoFile +from openpype.pipeline.editorial import ( + get_media_range_with_retimes +) import flame -class ExtractSubsetResources(openpype.api.Extractor): +class ExtractSubsetResources(publish.Extractor): """ Extractor for transcoding files from Flame clip """ @@ -47,7 +50,6 @@ class ExtractSubsetResources(openpype.api.Extractor): export_presets_mapping = {} def process(self, instance): - if not self.keep_original_representation: # remove previeous representation if not needed instance.data["representations"] = [] @@ -67,19 +69,77 @@ class ExtractSubsetResources(openpype.api.Extractor): # get media source first frame source_first_frame = instance.data["sourceFirstFrame"] + self.log.debug("_ frame_start: {}".format(frame_start)) + self.log.debug("_ source_first_frame: {}".format(source_first_frame)) + # get timeline in/out of segment clip_in = instance.data["clipIn"] clip_out = instance.data["clipOut"] + # get retimed attributres + retimed_data = self._get_retimed_attributes(instance) + + # get individual keys + retimed_handle_start = retimed_data["handle_start"] + retimed_handle_end = retimed_data["handle_end"] + retimed_source_duration = retimed_data["source_duration"] + retimed_speed = retimed_data["speed"] + # get handles value - take only the max from both handle_start = instance.data["handleStart"] - handle_end = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) + include_handles = instance.data.get("includeHandles") + retimed_handles = instance.data.get("retimedHandles") # get media source range with handles source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] + # retime if needed + if retimed_speed != 1.0: + if retimed_handles: + # handles are retimed + source_start_handles = ( + instance.data["sourceStart"] - retimed_handle_start) + source_end_handles = ( + source_start_handles + + (retimed_source_duration - 1) + + retimed_handle_start + + retimed_handle_end + ) + + else: + # handles are not retimed + source_end_handles = ( + source_start_handles + + (retimed_source_duration - 1) + + handle_start + + handle_end + ) + + # get frame range with handles for representation range + frame_start_handle = frame_start - handle_start + repre_frame_start = frame_start_handle + if include_handles: + if retimed_speed == 1.0 or not retimed_handles: + frame_start_handle = frame_start + else: + frame_start_handle = ( + frame_start - handle_start) + retimed_handle_start + + self.log.debug("_ frame_start_handle: {}".format( + frame_start_handle)) + self.log.debug("_ repre_frame_start: {}".format( + repre_frame_start)) + + # calculate duration with handles + source_duration_handles = ( + source_end_handles - source_start_handles) + 1 + + self.log.debug("_ source_duration_handles: {}".format( + source_duration_handles)) + # create staging dir path staging_dir = self.staging_dir(instance) @@ -93,6 +153,43 @@ class ExtractSubsetResources(openpype.api.Extractor): } export_presets.update(self.export_presets_mapping) + if not instance.data.get("versionData"): + 
instance.data["versionData"] = {} + + # set versiondata if any retime + version_data = retimed_data.get("version_data") + self.log.debug("_ version_data: {}".format(version_data)) + + if version_data: + instance.data["versionData"].update(version_data) + + # version data start frame + version_frame_start = frame_start + if include_handles: + version_frame_start = frame_start_handle + if retimed_speed != 1.0: + if retimed_handles: + instance.data["versionData"].update({ + "frameStart": version_frame_start, + "frameEnd": ( + (version_frame_start + source_duration_handles - 1) + - (retimed_handle_start + retimed_handle_end) + ) + }) + else: + instance.data["versionData"].update({ + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": version_frame_start, + "frameEnd": ( + (version_frame_start + source_duration_handles - 1) + - (handle_start + handle_end) + ) + }) + self.log.debug("_ version_data: {}".format( + instance.data["versionData"] + )) + # loop all preset names and for unique_name, preset_config in export_presets.items(): modify_xml_data = {} @@ -115,20 +212,10 @@ class ExtractSubsetResources(openpype.api.Extractor): ) ) - # get frame range with handles for representation range - frame_start_handle = frame_start - handle_start - - # calculate duration with handles - source_duration_handles = ( - source_end_handles - source_start_handles) - - # define in/out marks - in_mark = (source_start_handles - source_first_frame) + 1 - out_mark = in_mark + source_duration_handles - exporting_clip = None name_patern_xml = "_{}.".format( unique_name) + if export_type == "Sequence Publish": # change export clip to sequence exporting_clip = flame.duplicate(sequence_clip) @@ -142,19 +229,25 @@ class ExtractSubsetResources(openpype.api.Extractor): "__{}.").format( unique_name) - # change in/out marks to timeline in/out + # only for h264 with baked retime in_mark = clip_in - out_mark = clip_out + out_mark = clip_out + 1 + modify_xml_data.update({ + "exportHandles": True, + "nbHandles": handles + }) else: + in_mark = (source_start_handles - source_first_frame) + 1 + out_mark = in_mark + source_duration_handles exporting_clip = self.import_clip(clip_path) exporting_clip.name.set_value("{}_{}".format( asset_name, segment_name)) # add xml tags modifications modify_xml_data.update({ - "exportHandles": True, - "nbHandles": handles, - "startFrame": frame_start, + # enum position low start from 0 + "frameIndex": 0, + "startFrame": repre_frame_start, "namePattern": name_patern_xml }) @@ -162,6 +255,9 @@ class ExtractSubsetResources(openpype.api.Extractor): # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug("_ in_mark: {}".format(in_mark)) + self.log.debug("_ out_mark: {}".format(out_mark)) + export_kwargs = {} # validate xml preset file is filled if preset_file == "": @@ -196,9 +292,8 @@ class ExtractSubsetResources(openpype.api.Extractor): "namePattern": "__thumbnail" }) thumb_frame_number = int(in_mark + ( - source_duration_handles / 2)) + (out_mark - in_mark + 1) / 2)) - self.log.debug("__ in_mark: {}".format(in_mark)) self.log.debug("__ thumb_frame_number: {}".format( thumb_frame_number )) @@ -210,9 +305,6 @@ class ExtractSubsetResources(openpype.api.Extractor): "out_mark": out_mark }) - self.log.debug("__ modify_xml_data: {}".format( - pformat(modify_xml_data) - )) preset_path = opfapi.modify_preset_file( preset_orig_xml_path, staging_dir, modify_xml_data) @@ -281,9 +373,9 @@ class 
ExtractSubsetResources(openpype.api.Extractor): # add frame range if preset_config["representation_add_range"]: representation_data.update({ - "frameStart": frame_start_handle, + "frameStart": repre_frame_start, "frameEnd": ( - frame_start_handle + source_duration_handles), + repre_frame_start + source_duration_handles) - 1, "fps": instance.data["fps"] }) @@ -300,8 +392,32 @@ class ExtractSubsetResources(openpype.api.Extractor): # at the end remove the duplicated clip flame.delete(exporting_clip) - self.log.debug("All representations: {}".format( - pformat(instance.data["representations"]))) + def _get_retimed_attributes(self, instance): + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + + # get basic variables + otio_clip = instance.data["otioClip"] + + # get available range trimmed with processed retimes + retimed_attributes = get_media_range_with_retimes( + otio_clip, handle_start, handle_end) + self.log.debug( + ">> retimed_attributes: {}".format(retimed_attributes)) + + r_media_in = int(retimed_attributes["mediaIn"]) + r_media_out = int(retimed_attributes["mediaOut"]) + version_data = retimed_attributes.get("versionData") + + return { + "version_data": version_data, + "handle_start": int(retimed_attributes["handleStart"]), + "handle_end": int(retimed_attributes["handleEnd"]), + "source_duration": ( + (r_media_out - r_media_in) + 1 + ), + "speed": float(retimed_attributes["speed"]) + } def _should_skip(self, preset_config, clip_path, unique_name): # get activating attributes @@ -313,8 +429,6 @@ class ExtractSubsetResources(openpype.api.Extractor): unique_name, activated_preset, filter_path_regex ) ) - self.log.debug( - "__ clip_path: `{}`".format(clip_path)) # skip if not activated presete if not activated_preset: diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index f2ac23b19e..d07aaa6b7d 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -73,6 +73,8 @@ def load_apps(): opfapi.FlameMenuProjectConnect(opfapi.CTX.app_framework)) opfapi.CTX.flame_apps.append( opfapi.FlameMenuTimeline(opfapi.CTX.app_framework)) + opfapi.CTX.flame_apps.append( + opfapi.FlameMenuUniversal(opfapi.CTX.app_framework)) opfapi.CTX.app_framework.log.info("Apps are loaded") @@ -191,3 +193,27 @@ def get_timeline_custom_ui_actions(): openpype_install() return _build_app_menu("FlameMenuTimeline") + + +def get_batch_custom_ui_actions(): + """Hook to create submenu in batch + + Returns: + list: menu object + """ + # install openpype and the host + openpype_install() + + return _build_app_menu("FlameMenuUniversal") + + +def get_media_panel_custom_ui_actions(): + """Hook to create submenu in desktop + + Returns: + list: menu object + """ + # install openpype and the host + openpype_install() + + return _build_app_menu("FlameMenuUniversal") diff --git a/openpype/hosts/fusion/__init__.py b/openpype/hosts/fusion/__init__.py index e69de29bb2..ddae01890b 100644 --- a/openpype/hosts/fusion/__init__.py +++ b/openpype/hosts/fusion/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + FusionAddon, + FUSION_HOST_DIR, +) + + +__all__ = ( + "FusionAddon", + "FUSION_HOST_DIR", +) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py new file mode 100644 index 0000000000..1913cc2e30 --- /dev/null +++ b/openpype/hosts/fusion/addon.py @@ -0,0 +1,32 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces 
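A sketch of consuming `get_media_range_with_retimes` the same way `_get_retimed_attributes` does; the wrapper function below is illustrative only, and `otio_clip` is expected to come from `instance.data["otioClip"]`:

```python
from openpype.pipeline.editorial import get_media_range_with_retimes


def retimed_source_duration(otio_clip, handle_start, handle_end):
    # Keys mirror the dict returned to _get_retimed_attributes above
    attrs = get_media_range_with_retimes(otio_clip, handle_start, handle_end)
    media_in = int(attrs["mediaIn"])
    media_out = int(attrs["mediaOut"])
    return (media_out - media_in) + 1
```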
import IHostAddon + +FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class FusionAddon(OpenPypeModule, IHostAddon): + name = "fusion" + host_name = "fusion" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(FUSION_HOST_DIR, "hooks") + ] + + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "OPENPYPE_LOG_NO_COLORS": "Yes" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".comp"] diff --git a/openpype/hosts/fusion/api/__init__.py b/openpype/hosts/fusion/api/__init__.py index 19d1e092fe..ed70dbca50 100644 --- a/openpype/hosts/fusion/api/__init__.py +++ b/openpype/hosts/fusion/api/__init__.py @@ -5,10 +5,7 @@ from .pipeline import ( ls, imprint_container, - parse_container, - - get_current_comp, - comp_lock_and_undo_chunk + parse_container ) from .workio import ( @@ -22,8 +19,10 @@ from .workio import ( from .lib import ( maintained_selection, - get_additional_data, - update_frame_range + update_frame_range, + set_asset_framerange, + get_current_comp, + comp_lock_and_undo_chunk ) from .menu import launch_openpype_menu @@ -38,9 +37,6 @@ __all__ = [ "imprint_container", "parse_container", - "get_current_comp", - "comp_lock_and_undo_chunk", - # workio "open_file", "save_file", @@ -51,8 +47,10 @@ __all__ = [ # lib "maintained_selection", - "get_additional_data", "update_frame_range", + "set_asset_framerange", + "get_current_comp", + "comp_lock_and_undo_chunk", # menu "launch_openpype_menu", diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 001eb636ee..a33e5cf289 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -3,8 +3,7 @@ import sys import re import contextlib -from Qt import QtGui - +from openpype.lib import Logger from openpype.client import ( get_asset_by_name, get_subset_by_name, @@ -17,13 +16,14 @@ from openpype.pipeline import ( switch_container, legacy_io, ) -from .pipeline import get_current_comp, comp_lock_and_undo_chunk +from openpype.pipeline.context_tools import get_current_project_asset self = sys.modules[__name__] self._project = None -def update_frame_range(start, end, comp=None, set_render_range=True): +def update_frame_range(start, end, comp=None, set_render_range=True, + handle_start=0, handle_end=0): """Set Fusion comp's start and end frame range Args: @@ -32,6 +32,8 @@ def update_frame_range(start, end, comp=None, set_render_range=True): comp (object, Optional): comp object from fusion set_render_range (bool, Optional): When True this will also set the composition's render start and end frame. 
+ handle_start (float, int, Optional): frame handles before start frame + handle_end (float, int, Optional): frame handles after end frame Returns: None @@ -41,11 +43,16 @@ def update_frame_range(start, end, comp=None, set_render_range=True): if not comp: comp = get_current_comp() + # Convert any potential none type to zero + handle_start = handle_start or 0 + handle_end = handle_end or 0 + attrs = { - "COMPN_GlobalStart": start, - "COMPN_GlobalEnd": end + "COMPN_GlobalStart": start - handle_start, + "COMPN_GlobalEnd": end + handle_end } + # set frame range if set_render_range: attrs.update({ "COMPN_RenderStart": start, @@ -56,24 +63,122 @@ def update_frame_range(start, end, comp=None, set_render_range=True): comp.SetAttrs(attrs) -def get_additional_data(container): - """Get Fusion related data for the container +def set_asset_framerange(): + """Set Comp's frame range based on current asset""" + asset_doc = get_current_project_asset() + start = asset_doc["data"]["frameStart"] + end = asset_doc["data"]["frameEnd"] + handle_start = asset_doc["data"]["handleStart"] + handle_end = asset_doc["data"]["handleEnd"] + update_frame_range(start, end, set_render_range=True, + handle_start=handle_start, + handle_end=handle_end) - Args: - container(dict): the container found by the ls() function - Returns: - dict +def set_asset_resolution(): + """Set Comp's resolution width x height default based on current asset""" + asset_doc = get_current_project_asset() + width = asset_doc["data"]["resolutionWidth"] + height = asset_doc["data"]["resolutionHeight"] + comp = get_current_comp() + + print("Setting comp frame format resolution to {}x{}".format(width, + height)) + comp.SetPrefs({ + "Comp.FrameFormat.Width": width, + "Comp.FrameFormat.Height": height, + }) + + +def validate_comp_prefs(comp=None, force_repair=False): + """Validate current comp defaults with asset settings. + + Validates fps, resolutionWidth, resolutionHeight, aspectRatio. + + This does *not* validate frameStart, frameEnd, handleStart and handleEnd. 
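+
+    Example (illustrative, mirroring the calls made by the pipeline event
+    callbacks `on_save` and `on_new`; `comp` is a Fusion comp object):
+        >>> validate_comp_prefs()  # warn/prompt about mismatches
+        >>> validate_comp_prefs(comp, force_repair=True)  # repair silently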
""" - tool = container["_tool"] - tile_color = tool.TileColor - if tile_color is None: - return {} + if comp is None: + comp = get_current_comp() - return {"color": QtGui.QColor.fromRgbF(tile_color["R"], - tile_color["G"], - tile_color["B"])} + log = Logger.get_logger("validate_comp_prefs") + + fields = [ + "name", + "data.fps", + "data.resolutionWidth", + "data.resolutionHeight", + "data.pixelAspect" + ] + asset_doc = get_current_project_asset(fields=fields) + asset_data = asset_doc["data"] + + comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat") + + # Pixel aspect ratio in Fusion is set as AspectX and AspectY so we convert + # the data to something that is more sensible to Fusion + asset_data["pixelAspectX"] = asset_data.pop("pixelAspect") + asset_data["pixelAspectY"] = 1.0 + + validations = [ + ("fps", "Rate", "FPS"), + ("resolutionWidth", "Width", "Resolution Width"), + ("resolutionHeight", "Height", "Resolution Height"), + ("pixelAspectX", "AspectX", "Pixel Aspect Ratio X"), + ("pixelAspectY", "AspectY", "Pixel Aspect Ratio Y") + ] + + invalid = [] + for key, comp_key, label in validations: + asset_value = asset_data[key] + comp_value = comp_frame_format_prefs.get(comp_key) + if asset_value != comp_value: + invalid_msg = "{} {} should be {}".format(label, + comp_value, + asset_value) + invalid.append(invalid_msg) + + if not force_repair: + # Do not log warning if we force repair anyway + log.warning( + "Comp {pref} {value} does not match asset " + "'{asset_name}' {pref} {asset_value}".format( + pref=label, + value=comp_value, + asset_name=asset_doc["name"], + asset_value=asset_value) + ) + + if invalid: + + def _on_repair(): + attributes = dict() + for key, comp_key, _label in validations: + value = asset_data[key] + comp_key_full = "Comp.FrameFormat.{}".format(comp_key) + attributes[comp_key_full] = value + comp.SetPrefs(attributes) + + if force_repair: + log.info("Applying default Comp preferences..") + _on_repair() + return + + from . 
import menu + from openpype.widgets import popup + from openpype.style import load_stylesheet + dialog = popup.Popup(parent=menu.menu) + dialog.setWindowTitle("Fusion comp has invalid configuration") + + msg = "Comp preferences mismatches '{}'".format(asset_doc["name"]) + msg += "\n" + "\n".join(invalid) + dialog.setMessage(msg) + dialog.setButtonText("Repair") + dialog.on_clicked.connect(_on_repair) + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.setStyleSheet(load_stylesheet()) def switch_item(container, @@ -195,3 +300,21 @@ def get_frame_path(path): padding = 4 # default Fusion padding return filename, padding, ext + + +def get_current_comp(): + """Hack to get current comp in this session""" + fusion = getattr(sys.modules["__main__"], "fusion", None) + return fusion.CurrentComp if fusion else None + + +@contextlib.contextmanager +def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"): + """Lock comp and open an undo chunk during the context""" + try: + comp.Lock() + comp.StartUndo(undo_queue_name) + yield + finally: + comp.Unlock() + comp.EndUndo() diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 6234322d7f..39126935e6 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -1,43 +1,26 @@ -import os import sys -from Qt import QtWidgets, QtCore +from Qt import QtWidgets, QtCore, QtGui -from openpype import style from openpype.tools.utils import host_tools - +from openpype.style import load_stylesheet +from openpype.lib import register_event_callback from openpype.hosts.fusion.scripts import ( set_rendermode, duplicate_with_inputs ) +from openpype.hosts.fusion.api.lib import ( + set_asset_framerange, + set_asset_resolution +) +from openpype.pipeline import legacy_io +from openpype.resources import get_openpype_icon_filepath +from .pipeline import FusionEventHandler +from .pulse import FusionPulse -def load_stylesheet(): - path = os.path.join(os.path.dirname(__file__), "menu_style.qss") - if not os.path.exists(path): - print("Unable to load stylesheet, file not found in resources") - return "" - - with open(path, "r") as file_stream: - stylesheet = file_stream.read() - return stylesheet - - -class Spacer(QtWidgets.QWidget): - def __init__(self, height, *args, **kwargs): - super(Spacer, self).__init__(*args, **kwargs) - - self.setFixedHeight(height) - - real_spacer = QtWidgets.QWidget(self) - real_spacer.setObjectName("Spacer") - real_spacer.setFixedHeight(height) - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(real_spacer) - - self.setLayout(layout) +self = sys.modules[__name__] +self.menu = None class OpenPypeMenu(QtWidgets.QWidget): @@ -46,15 +29,29 @@ class OpenPypeMenu(QtWidgets.QWidget): self.setObjectName("OpenPypeMenu") + icon_path = get_openpype_icon_filepath() + icon = QtGui.QIcon(icon_path) + self.setWindowIcon(icon) + self.setWindowFlags( QtCore.Qt.Window | QtCore.Qt.CustomizeWindowHint | QtCore.Qt.WindowTitleHint + | QtCore.Qt.WindowMinimizeButtonHint | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowStaysOnTopHint ) self.render_mode_widget = None self.setWindowTitle("OpenPype") + + asset_label = QtWidgets.QLabel("Context", self) + asset_label.setStyleSheet("""QLabel { + font-size: 14px; + font-weight: 600; + color: #5f9fb8; + }""") + asset_label.setAlignment(QtCore.Qt.AlignHCenter) + workfiles_btn = QtWidgets.QPushButton("Workfiles...", self) create_btn = QtWidgets.QPushButton("Create...", self) publish_btn = 
QtWidgets.QPushButton("Publish...", self) @@ -62,77 +59,111 @@ class OpenPypeMenu(QtWidgets.QWidget): manager_btn = QtWidgets.QPushButton("Manage...", self) libload_btn = QtWidgets.QPushButton("Library...", self) rendermode_btn = QtWidgets.QPushButton("Set render mode...", self) + set_framerange_btn = QtWidgets.QPushButton("Set Frame Range", self) + set_resolution_btn = QtWidgets.QPushButton("Set Resolution", self) duplicate_with_inputs_btn = QtWidgets.QPushButton( "Duplicate with input connections", self ) - reset_resolution_btn = QtWidgets.QPushButton( - "Reset Resolution from project", self - ) layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(10, 20, 10, 20) + layout.addWidget(asset_label) + + layout.addSpacing(20) + layout.addWidget(workfiles_btn) + + layout.addSpacing(20) + layout.addWidget(create_btn) - layout.addWidget(publish_btn) layout.addWidget(load_btn) + layout.addWidget(publish_btn) layout.addWidget(manager_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(libload_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) + layout.addWidget(set_framerange_btn) + layout.addWidget(set_resolution_btn) layout.addWidget(rendermode_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(duplicate_with_inputs_btn) - layout.addWidget(reset_resolution_btn) self.setLayout(layout) + # Store reference so we can update the label + self.asset_label = asset_label + workfiles_btn.clicked.connect(self.on_workfile_clicked) create_btn.clicked.connect(self.on_create_clicked) publish_btn.clicked.connect(self.on_publish_clicked) load_btn.clicked.connect(self.on_load_clicked) manager_btn.clicked.connect(self.on_manager_clicked) libload_btn.clicked.connect(self.on_libload_clicked) - rendermode_btn.clicked.connect(self.on_rendernode_clicked) + rendermode_btn.clicked.connect(self.on_rendermode_clicked) duplicate_with_inputs_btn.clicked.connect( self.on_duplicate_with_inputs_clicked) - reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked) + set_resolution_btn.clicked.connect(self.on_set_resolution_clicked) + set_framerange_btn.clicked.connect(self.on_set_framerange_clicked) + + self._callbacks = [] + self.register_callback("taskChanged", self.on_task_changed) + self.on_task_changed() + + # Force close current process if Fusion is closed + self._pulse = FusionPulse(parent=self) + self._pulse.start() + + # Detect Fusion events as OpenPype events + self._event_handler = FusionEventHandler(parent=self) + self._event_handler.start() + + def on_task_changed(self): + # Update current context label + label = legacy_io.Session["AVALON_ASSET"] + self.asset_label.setText(label) + + def register_callback(self, name, fn): + + # Create a wrapper callback that we only store + # for as long as we want it to persist as callback + def _callback(*args): + fn() + + self._callbacks.append(_callback) + register_event_callback(name, _callback) + + def deregister_all_callbacks(self): + self._callbacks[:] = [] def on_workfile_clicked(self): - print("Clicked Workfile") host_tools.show_workfiles() def on_create_clicked(self): - print("Clicked Create") host_tools.show_creator() def on_publish_clicked(self): - print("Clicked Publish") host_tools.show_publish() def on_load_clicked(self): - print("Clicked Load") host_tools.show_loader(use_context=True) def on_manager_clicked(self): - print("Clicked Manager") host_tools.show_scene_inventory() def on_libload_clicked(self): - print("Clicked Library") host_tools.show_library_loader() - def 
on_rendernode_clicked(self): - print("Clicked Set Render Mode") + def on_rendermode_clicked(self): if self.render_mode_widget is None: window = set_rendermode.SetRenderMode() - window.setStyleSheet(style.load_stylesheet()) + window.setStyleSheet(load_stylesheet()) window.show() self.render_mode_widget = window else: @@ -140,15 +171,16 @@ class OpenPypeMenu(QtWidgets.QWidget): def on_duplicate_with_inputs_clicked(self): duplicate_with_inputs.duplicate_with_input_connections() - print("Clicked Set Colorspace") - def on_reset_resolution_clicked(self): - print("Clicked Reset Resolution") + def on_set_resolution_clicked(self): + set_asset_resolution() + + def on_set_framerange_clicked(self): + set_asset_framerange() def launch_openpype_menu(): app = QtWidgets.QApplication(sys.argv) - app.setQuitOnLastWindowClosed(False) pype_menu = OpenPypeMenu() @@ -156,5 +188,8 @@ def launch_openpype_menu(): pype_menu.setStyleSheet(stylesheet) pype_menu.show() + self.menu = pype_menu - sys.exit(app.exec_()) + result = app.exec_() + print("Shutting down..") + sys.exit(result) diff --git a/openpype/hosts/fusion/api/menu_style.qss b/openpype/hosts/fusion/api/menu_style.qss deleted file mode 100644 index 12c474b070..0000000000 --- a/openpype/hosts/fusion/api/menu_style.qss +++ /dev/null @@ -1,29 +0,0 @@ -QWidget { - background-color: #282828; - border-radius: 3; -} - -QPushButton { - border: 1px solid #090909; - background-color: #201f1f; - color: #ffffff; - padding: 5; -} - -QPushButton:focus { - background-color: "#171717"; - color: #d0d0d0; -} - -QPushButton:hover { - background-color: "#171717"; - color: #e64b3d; -} - -#OpenPypeMenu { - border: 1px solid #fef9ef; -} - -#Spacer { - background-color: #282828; -} diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 54002f9f51..b6092f7c1b 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -4,11 +4,15 @@ Basic avalon integration import os import sys import logging -import contextlib import pyblish.api +from Qt import QtCore -from openpype.api import Logger +from openpype.lib import ( + Logger, + register_event_callback, + emit_event +) from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -18,12 +22,19 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) -import openpype.hosts.fusion +from openpype.pipeline.load import any_outdated_containers +from openpype.hosts.fusion import FUSION_HOST_DIR +from openpype.tools.utils import host_tools -log = Logger().get_logger(__name__) +from .lib import ( + get_current_comp, + comp_lock_and_undo_chunk, + validate_comp_prefs +) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +log = Logger.get_logger(__name__) + +PLUGINS_DIR = os.path.join(FUSION_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") @@ -31,16 +42,32 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class CompLogHandler(logging.Handler): +class FusionLogHandler(logging.Handler): + # Keep a reference to fusion's Print function (Remote Object) + _print = None + + @property + def print(self): + if self._print is not None: + # Use cached + return self._print + + _print = getattr(sys.modules["__main__"], "fusion").Print + if _print is None: + # Backwards compatibility: Print method on Fusion instance 
was + # added around Fusion 17.4 and wasn't available on PyRemote Object + # before + _print = get_current_comp().Print + self._print = _print + return _print + def emit(self, record): entry = self.format(record) - comp = get_current_comp() - if comp: - comp.Print(entry) + self.print(entry) def install(): - """Install fusion-specific functionality of avalon-core. + """Install fusion-specific functionality of OpenPype. This is where you install menus and register families, data and loaders into fusion. @@ -52,20 +79,18 @@ def install(): """ # Remove all handlers associated with the root logger object, because - # that one sometimes logs as "warnings" incorrectly. + # that one always logs as "warnings" incorrectly. for handler in logging.root.handlers[:]: logging.root.removeHandler(handler) # Attach default logging handler that prints to active comp logger = logging.getLogger() formatter = logging.Formatter(fmt="%(message)s\n") - handler = CompLogHandler() + handler = FusionLogHandler() handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) - log.info("openpype.hosts.fusion installed") - pyblish.api.register_host("fusion") pyblish.api.register_plugin_path(PUBLISH_PATH) log.info("Registering Fusion plug-ins..") @@ -78,6 +103,11 @@ def install(): "instanceToggled", on_pyblish_instance_toggled ) + # Register events + register_event_callback("open", on_after_open) + register_event_callback("save", on_save) + register_event_callback("new", on_new) + def uninstall(): """Uninstall all that was installed @@ -103,7 +133,7 @@ def uninstall(): ) -def on_pyblish_instance_toggled(instance, new_value, old_value): +def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle saver tool passthrough states on instance toggles.""" comp = instance.context.data.get("currentComp") if not comp: @@ -126,6 +156,48 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): tool.SetAttrs({"TOOLB_PassThrough": passthrough}) +def on_new(event): + comp = event["Rets"]["comp"] + validate_comp_prefs(comp, force_repair=True) + + +def on_save(event): + comp = event["sender"] + validate_comp_prefs(comp) + + +def on_after_open(event): + comp = event["sender"] + validate_comp_prefs(comp) + + if any_outdated_containers(): + log.warning("Scene has outdated content.") + + # Find OpenPype menu to attach to + from . 
import menu + + def _on_show_scene_inventory(): + # ensure that comp is active + frame = comp.CurrentFrame + if not frame: + print("Comp is closed, skipping show scene inventory") + return + frame.ActivateFrame() # raise comp window + host_tools.show_scene_inventory() + + from openpype.widgets import popup + from openpype.style import load_stylesheet + dialog = popup.Popup(parent=menu.menu) + dialog.setWindowTitle("Fusion comp has outdated content") + dialog.setMessage("There are outdated containers in " + "your Fusion comp.") + dialog.on_clicked.connect(_on_show_scene_inventory) + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.setStyleSheet(load_stylesheet()) + + def ls(): """List containers from active Fusion scene @@ -139,7 +211,7 @@ def ls(): """ comp = get_current_comp() - tools = comp.GetToolList(False, "Loader").values() + tools = comp.GetToolList(False).values() for tool in tools: container = parse_container(tool) @@ -211,19 +283,114 @@ def parse_container(tool): return container -def get_current_comp(): - """Hack to get current comp in this session""" - fusion = getattr(sys.modules["__main__"], "fusion", None) - return fusion.CurrentComp if fusion else None +class FusionEventThread(QtCore.QThread): + """QThread which will periodically ping Fusion app for any events. + + The fusion.UIManager must be set up to be notified of events before they'll + be reported by this thread, for example: + fusion.UIManager.AddNotify("Comp_Save", None) + + """ + + on_event = QtCore.Signal(dict) + + def run(self): + + app = getattr(sys.modules["__main__"], "app", None) + if app is None: + # No Fusion app found + return + + # As optimization store the GetEvent method directly because every + # getattr of UIManager.GetEvent tries to resolve the Remote Function + # through the PyRemoteObject + get_event = app.UIManager.GetEvent + delay = int(os.environ.get("OPENPYPE_FUSION_CALLBACK_INTERVAL", 1000)) + while True: + if self.isInterruptionRequested(): + return + + # Process all events that have been queued up until now + while True: + event = get_event(False) + if not event: + break + self.on_event.emit(event) + + # Wait some time before processing events again + # to not keep blocking the UI + self.msleep(delay) -@contextlib.contextmanager -def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"): - """Lock comp and open an undo chunk during the context""" - try: - comp.Lock() - comp.StartUndo(undo_queue_name) - yield - finally: - comp.Unlock() - comp.EndUndo() +class FusionEventHandler(QtCore.QObject): + """Emits OpenPype events based on Fusion events captured in a QThread. + + This will emit the following OpenPype events based on Fusion actions: + save: Comp_Save, Comp_SaveAs + open: Comp_Opened + new: Comp_New + + To use this you can attach it to you Qt UI so it runs in the background. + E.g. 
+        >>> handler = FusionEventHandler(parent=window)
+        >>> handler.start()
+
+
+    """
+    ACTION_IDS = [
+        "Comp_Save",
+        "Comp_SaveAs",
+        "Comp_New",
+        "Comp_Opened"
+    ]
+
+    def __init__(self, parent=None):
+        super(FusionEventHandler, self).__init__(parent=parent)
+
+        # Set up Fusion event callbacks
+        fusion = getattr(sys.modules["__main__"], "fusion", None)
+        ui = fusion.UIManager
+
+        # Add notifications for the ones we want to listen to
+        notifiers = []
+        for action_id in self.ACTION_IDS:
+            notifier = ui.AddNotify(action_id, None)
+            notifiers.append(notifier)
+
+        # TODO: Not entirely sure whether these must be kept to avoid
+        #       garbage collection
+        self._notifiers = notifiers
+
+        self._event_thread = FusionEventThread(parent=self)
+        self._event_thread.on_event.connect(self._on_event)
+
+    def start(self):
+        self._event_thread.start()
+
+    def stop(self):
+        # QThread has no stop(); request interruption like FusionPulse does
+        self._event_thread.requestInterruption()
+
+    def _on_event(self, event):
+        """Handle Fusion events to emit OpenPype events"""
+        if not event:
+            return
+
+        what = event["what"]
+
+        # Comp Save
+        if what in {"Comp_Save", "Comp_SaveAs"}:
+            if not event["Rets"].get("success"):
+                # If the Save action is cancelled it will still emit an
+                # event but with "success": False so we ignore those cases
+                return
+            # Comp was saved
+            emit_event("save", data=event)
+            return
+
+        # Comp New
+        elif what in {"Comp_New"}:
+            emit_event("new", data=event)
+
+        # Comp Opened
+        elif what in {"Comp_Opened"}:
+            emit_event("open", data=event)
diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py
new file mode 100644
index 0000000000..eb7ef3785d
--- /dev/null
+++ b/openpype/hosts/fusion/api/pulse.py
@@ -0,0 +1,63 @@
+import os
+import sys
+
+from Qt import QtCore
+
+
+class PulseThread(QtCore.QThread):
+    no_response = QtCore.Signal()
+
+    def __init__(self, parent=None):
+        super(PulseThread, self).__init__(parent=parent)
+
+    def run(self):
+        app = getattr(sys.modules["__main__"], "app", None)
+
+        # Interval in milliseconds (cast to int; environment values are str)
+        interval = int(os.environ.get("OPENPYPE_FUSION_PULSE_INTERVAL", 1000))
+
+        while True:
+            if self.isInterruptionRequested():
+                return
+
+            # We don't need to call Test because PyRemoteObject of the app
+            # will actually fail to even resolve the Test function if it has
+            # gone down. So we can actually already just check by confirming
+            # the method is still getting resolved. (Optimization)
+            if app.Test is None:
+                self.no_response.emit()
+
+            self.msleep(interval)
+
+
+class FusionPulse(QtCore.QObject):
+    """A timer that checks whether the host app is still alive.
+
+    This checks whether the Fusion process is still active at a certain
+    interval. This is useful due to how Fusion runs its scripts. Each script
+    runs in its own environment and process (a `fusionscript` process each).
+    If Fusion goes down while a UI process is still running alongside it,
+    the `fusionscript.exe` can remain running in the background in limbo,
+    due to e.g. a Qt interface's QApplication that keeps running indefinitely.
+
+    Warning:
+        When the host is not detected this will automatically exit
+        the current process.
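+
+    Example (an illustrative sketch; `window` stands in for any parent
+    QWidget, mirroring how the OpenPype menu starts it):
+        >>> pulse = FusionPulse(parent=window)
+        >>> pulse.start()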
+ + """ + + def __init__(self, parent=None): + super(FusionPulse, self).__init__(parent=parent) + self._thread = PulseThread(parent=self) + self._thread.no_response.connect(self.on_no_response) + + def on_no_response(self): + print("Pulse detected no response from Fusion..") + sys.exit(1) + + def start(self): + self._thread.start() + + def stop(self): + self._thread.requestInterruption() diff --git a/openpype/hosts/fusion/api/workio.py b/openpype/hosts/fusion/api/workio.py index a1710c6e3a..939b2ff4be 100644 --- a/openpype/hosts/fusion/api/workio.py +++ b/openpype/hosts/fusion/api/workio.py @@ -2,13 +2,11 @@ import sys import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - -from .pipeline import get_current_comp +from .lib import get_current_comp def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["fusion"] + return [".comp"] def has_unsaved_changes(): diff --git a/openpype/hosts/fusion/deploy/Config/openpype_menu.fu b/openpype/hosts/fusion/deploy/Config/openpype_menu.fu new file mode 100644 index 0000000000..8b8d448259 --- /dev/null +++ b/openpype/hosts/fusion/deploy/Config/openpype_menu.fu @@ -0,0 +1,60 @@ +{ + Action + { + ID = "OpenPype_Menu", + Category = "OpenPype", + Name = "OpenPype Menu", + + Targets = + { + Composition = + { + Execute = _Lua [=[ + local scriptPath = app:MapPath("OpenPype:MenuScripts/openpype_menu.py") + if bmd.fileexists(scriptPath) == false then + print("[OpenPype Error] Can't run file: " .. scriptPath) + else + target:RunScript(scriptPath) + end + ]=], + }, + }, + }, + Action + { + ID = "OpenPype_Install_PySide2", + Category = "OpenPype", + Name = "Install PySide2", + + Targets = + { + Composition = + { + Execute = _Lua [=[ + local scriptPath = app:MapPath("OpenPype:MenuScripts/install_pyside2.py") + if bmd.fileexists(scriptPath) == false then + print("[OpenPype Error] Can't run file: " .. scriptPath) + else + target:RunScript(scriptPath) + end + ]=], + }, + }, + }, + Menus + { + Target = "ChildFrame", + + Before "Help" + { + Sub "OpenPype" + { + "OpenPype_Menu{}", + "_", + Sub "Admin" { + "OpenPype_Install_PySide2{}" + } + } + }, + }, +} diff --git a/openpype/hosts/fusion/deploy/MenuScripts/README.md b/openpype/hosts/fusion/deploy/MenuScripts/README.md new file mode 100644 index 0000000000..f87eaea4a2 --- /dev/null +++ b/openpype/hosts/fusion/deploy/MenuScripts/README.md @@ -0,0 +1,6 @@ +### OpenPype deploy MenuScripts + +Note that this `MenuScripts` is not an official Fusion folder. +OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions. + +They are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`. 
diff --git a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py
new file mode 100644
index 0000000000..ab9f13ce05
--- /dev/null
+++ b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py
@@ -0,0 +1,29 @@
+# This is just a quick hack for users running Py3 locally but having no
+# Qt library installed
+import os
+import subprocess
+import importlib
+
+
+try:
+    from Qt import QtWidgets  # noqa: F401
+    from Qt import __binding__
+    print(f"Qt binding: {__binding__}")
+    mod = importlib.import_module(__binding__)
+    print(f"Qt path: {mod.__file__}")
+    print("Qt library found, nothing to do..")
+
+except ImportError:
+    print("Assuming no Qt library is installed..")
+    print('Installing PySide2 for Python 3.6: '
+          f'{os.environ["FUSION16_PYTHON36_HOME"]}')
+
+    # Get full path to python executable
+    exe = "python.exe" if os.name == 'nt' else "python"
+    python = os.path.join(os.environ["FUSION16_PYTHON36_HOME"], exe)
+    assert os.path.exists(python), f"Python doesn't exist: {python}"
+
+    # Do python -m pip install PySide2
+    args = [python, "-m", "pip", "install", "PySide2"]
+    print(f"Args: {args}")
+    subprocess.Popen(args)
diff --git a/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py b/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py
new file mode 100644
index 0000000000..2918c552c8
--- /dev/null
+++ b/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py
@@ -0,0 +1,35 @@
+import os
+import sys
+
+from openpype.lib import Logger
+from openpype.pipeline import (
+    install_host,
+    registered_host,
+)
+
+
+def main(env):
+    # This script's working directory starts in the Fusion application
+    # folder. However the contents of that folder can conflict with Qt
+    # library dlls so we make sure to move out of it to avoid DLL Load
+    # Failed errors.
+    os.chdir("..")
+    from openpype.hosts.fusion import api
+    from openpype.hosts.fusion.api import menu
+
+    # install the Fusion host integration
+    install_host(api)
+
+    log = Logger.get_logger(__name__)
+    log.info(f"Registered host: {registered_host()}")
+
+    menu.launch_openpype_menu()
+
+    # Initiate a QTimer to check if Fusion is still alive every X interval
+    # If Fusion is not found - kill itself
+    # todo(roy): Implement timer that ensures UI doesn't remain when e.g.
+ # Fusion closes down + + +if __name__ == "__main__": + result = main(os.environ) + sys.exit(not bool(result)) diff --git a/openpype/hosts/fusion/utility_scripts/32bit/backgrounds_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/backgrounds_selected_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/32bit/backgrounds_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/backgrounds_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/32bit/loaders_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/loaders_selected_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/32bit/loaders_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/loaders_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/switch_ui.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py diff --git a/openpype/hosts/fusion/utility_scripts/update_loader_ranges.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/update_loader_ranges.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py diff --git a/openpype/hosts/fusion/deploy/fusion_shared.prefs b/openpype/hosts/fusion/deploy/fusion_shared.prefs new file mode 100644 index 0000000000..998c6a6d66 --- /dev/null +++ b/openpype/hosts/fusion/deploy/fusion_shared.prefs @@ -0,0 +1,19 @@ +{ +Locked = true, +Global = { + Paths = { + Map = { + ["OpenPype:"] = "$(OPENPYPE_FUSION)/deploy", + ["Reactor:"] = "$(REACTOR)", + + ["Config:"] = "UserPaths:Config;OpenPype:Config", + ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts;OpenPype:Scripts", + ["UserPaths:"] = "UserData:;AllData:;Fusion:;Reactor:Deploy" + }, + }, + Script = { + PythonVersion = 3, + Python3Forced = true + }, + }, +} \ No newline at end of file diff --git a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py new file mode 100644 index 0000000000..d1ae5f64fd --- /dev/null +++ b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py @@ -0,0 +1,34 @@ +import os +import platform + +from openpype.lib import PreLaunchHook, ApplicationLaunchFailed + + +class FusionPreLaunchOCIO(PreLaunchHook): + """Set OCIO environment variable for Fusion""" + app_groups = ["fusion"] + + def execute(self): + """Hook entry method.""" + + # get image io + project_settings = self.data["project_settings"] + + # make sure anatomy settings are having flame key + imageio_fusion = 
project_settings["fusion"]["imageio"] + + ocio = imageio_fusion.get("ocio") + enabled = ocio.get("enabled", False) + if not enabled: + return + + platform_key = platform.system().lower() + ocio_path = ocio["configFilePath"][platform_key] + if not ocio_path: + raise ApplicationLaunchFailed( + "Fusion OCIO is enabled in project settings but no OCIO config" + f"path is set for your current platform: {platform_key}" + ) + + self.log.info(f"Setting OCIO config path: {ocio_path}") + self.launch_context.env["OCIO"] = os.pathsep.join(ocio_path) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index e635a0ea74..d043d54322 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,114 +1,61 @@ import os -import shutil - -import openpype.hosts.fusion from openpype.lib import PreLaunchHook, ApplicationLaunchFailed +from openpype.hosts.fusion import FUSION_HOST_DIR class FusionPrelaunch(PreLaunchHook): - """ - This hook will check if current workfile path has Fusion - project inside. + """Prepares OpenPype Fusion environment + + Requires FUSION_PYTHON3_HOME to be defined in the environment for Fusion + to point at a valid Python 3 build for Fusion. That is Python 3.3-3.10 + for Fusion 18 and Fusion 3.6 for Fusion 16 and 17. + + This also sets FUSION16_MasterPrefs to apply the fusion master prefs + as set in openpype/hosts/fusion/deploy/fusion_shared.prefs to enable + the OpenPype menu and force Python 3 over Python 2. + """ app_groups = ["fusion"] def execute(self): - # making sure python 3.6 is installed at provided path - py36_dir = self.launch_context.env.get("PYTHON36") - if not py36_dir: + # making sure python 3 is installed at provided path + # Py 3.3-3.10 for Fusion 18+ or Py 3.6 for Fu 16-17 + py3_var = "FUSION_PYTHON3_HOME" + fusion_python3_home = self.launch_context.env.get(py3_var, "") + + self.log.info(f"Looking for Python 3 in: {fusion_python3_home}") + for path in fusion_python3_home.split(os.pathsep): + # Allow defining multiple paths to allow "fallback" to other + # path. But make to set only a single path as final variable. + py3_dir = os.path.normpath(path) + if os.path.isdir(py3_dir): + break + else: raise ApplicationLaunchFailed( - "Required environment variable \"PYTHON36\" is not set." - "\n\nFusion implementation requires to have" - " installed Python 3.6" + "Python 3 is not installed at the provided path.\n" + "Make sure the environment in fusion settings has " + "'FUSION_PYTHON3_HOME' set correctly and make sure " + "Python 3 is installed in the given path." + f"\n\nPYTHON36: {fusion_python3_home}" ) - py36_dir = os.path.normpath(py36_dir) - if not os.path.isdir(py36_dir): - raise ApplicationLaunchFailed( - "Python 3.6 is not installed at the provided path.\n" - "Either make sure the environments in fusion settings has" - " 'PYTHON36' set corectly or make sure Python 3.6 is installed" - f" in the given path.\n\nPYTHON36: {py36_dir}" - ) - self.log.info(f"Path to Fusion Python folder: '{py36_dir}'...") - self.launch_context.env["PYTHON36"] = py36_dir + self.log.info(f"Setting {py3_var}: '{py3_dir}'...") + self.launch_context.env[py3_var] = py3_dir - utility_dir = self.launch_context.env.get("FUSION_UTILITY_SCRIPTS_DIR") - if not utility_dir: - raise ApplicationLaunchFailed( - "Required Fusion utility script dir environment variable" - " \"FUSION_UTILITY_SCRIPTS_DIR\" is not set." 
- ) + # Fusion 18+ requires FUSION_PYTHON3_HOME to also be on PATH + self.launch_context.env["PATH"] += ";" + py3_dir - # setting utility scripts dir for scripts syncing - utility_dir = os.path.normpath(utility_dir) - if not os.path.isdir(utility_dir): - raise ApplicationLaunchFailed( - "Fusion utility script dir does not exist. Either make sure " - "the environments in fusion settings has" - " 'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall " - f"Fusion.\n\nFUSION_UTILITY_SCRIPTS_DIR: '{utility_dir}'" - ) + # Fusion 16 and 17 use FUSION16_PYTHON36_HOME instead of + # FUSION_PYTHON3_HOME and will only work with a Python 3.6 version + # TODO: Detect Fusion version to only set for specific Fusion build + self.launch_context.env["FUSION16_PYTHON36_HOME"] = py3_dir - self._sync_utility_scripts(self.launch_context.env) - self.log.info("Fusion Pype wrapper has been installed") + # Add our Fusion Master Prefs which is the only way to customize + # Fusion to define where it can read custom scripts and tools from + self.log.info(f"Setting OPENPYPE_FUSION: {FUSION_HOST_DIR}") + self.launch_context.env["OPENPYPE_FUSION"] = FUSION_HOST_DIR - def _sync_utility_scripts(self, env): - """ Synchronizing basic utlility scripts for resolve. - - To be able to run scripts from inside `Fusion/Workspace/Scripts` menu - all scripts has to be accessible from defined folder. - """ - if not env: - env = {k: v for k, v in os.environ.items()} - - # initiate inputs - scripts = {} - us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR") - us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "") - us_paths = [os.path.join( - os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)), - "utility_scripts" - )] - - # collect script dirs - if us_env: - self.log.info(f"Utility Scripts Env: `{us_env}`") - us_paths = us_env.split( - os.pathsep) + us_paths - - # collect scripts from dirs - for path in us_paths: - scripts.update({path: os.listdir(path)}) - - self.log.info(f"Utility Scripts Dir: `{us_paths}`") - self.log.info(f"Utility Scripts: `{scripts}`") - - # make sure no script file is in folder - if next((s for s in os.listdir(us_dir)), None): - for s in os.listdir(us_dir): - path = os.path.normpath( - os.path.join(us_dir, s)) - self.log.info(f"Removing `{path}`...") - - # remove file or directory if not in our folders - if not os.path.isdir(path): - os.remove(path) - else: - shutil.rmtree(path) - - # copy scripts into Resolve's utility scripts dir - for d, sl in scripts.items(): - # directory and scripts list - for s in sl: - # script in script list - src = os.path.normpath(os.path.join(d, s)) - dst = os.path.normpath(os.path.join(us_dir, s)) - - self.log.info(f"Copying `{src}` to `{dst}`...") - - # copy file or directory from our folders to fusion's folder - if not os.path.isdir(src): - shutil.copy2(src, dst) - else: - shutil.copytree(src, dst) + pref_var = "FUSION16_MasterPrefs" # used by Fusion 16, 17 and 18 + prefs = os.path.join(FUSION_HOST_DIR, "deploy", "fusion_shared.prefs") + self.log.info(f"Setting {pref_var}: {prefs}") + self.launch_context.env[pref_var] = prefs diff --git a/openpype/hosts/fusion/plugins/create/create_exr_saver.py b/openpype/hosts/fusion/plugins/create/create_exr_saver.py index 8bab5ee9b1..6d93fe710a 100644 --- a/openpype/hosts/fusion/plugins/create/create_exr_saver.py +++ b/openpype/hosts/fusion/plugins/create/create_exr_saver.py @@ -1,6 +1,9 @@ import os -from openpype.pipeline import LegacyCreator +from openpype.pipeline import ( + LegacyCreator, + legacy_io +) from 
openpype.hosts.fusion.api import ( get_current_comp, comp_lock_and_undo_chunk @@ -21,12 +24,9 @@ class CreateOpenEXRSaver(LegacyCreator): comp = get_current_comp() - # todo: improve method of getting current environment - # todo: pref avalon.Session over os.environ + workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"]) - workdir = os.path.normpath(os.environ["AVALON_WORKDIR"]) - - filename = "{}..tiff".format(self.name) + filename = "{}..exr".format(self.name) filepath = os.path.join(workdir, "render", filename) with comp_lock_and_undo_chunk(comp): @@ -39,10 +39,10 @@ class CreateOpenEXRSaver(LegacyCreator): saver["Clip"] = filepath saver["OutputFormat"] = file_format - # # # Set standard TIFF settings + # Check file format settings are available if saver[file_format] is None: - raise RuntimeError("File format is not set to TiffFormat, " - "this is a bug") + raise RuntimeError("File format is not set to {}, " + "this is a bug".format(file_format)) # Set file format attributes saver[file_format]["Depth"] = 1 # int8 | int16 | float32 | other diff --git a/openpype/hosts/fusion/plugins/load/load_alembic.py b/openpype/hosts/fusion/plugins/load/load_alembic.py new file mode 100644 index 0000000000..f8b8c2cb0a --- /dev/null +++ b/openpype/hosts/fusion/plugins/load/load_alembic.py @@ -0,0 +1,70 @@ +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.hosts.fusion.api import ( + imprint_container, + get_current_comp, + comp_lock_and_undo_chunk +) + + +class FusionLoadAlembicMesh(load.LoaderPlugin): + """Load Alembic mesh into Fusion""" + + families = ["pointcache", "model"] + representations = ["abc"] + + label = "Load alembic mesh" + order = -10 + icon = "code-fork" + color = "orange" + + tool_type = "SurfaceAlembicMesh" + + def load(self, context, name, namespace, data): + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Create the Loader with the filename path set + comp = get_current_comp() + with comp_lock_and_undo_chunk(comp, "Create tool"): + + path = self.fname + + args = (-32768, -32768) + tool = comp.AddTool(self.tool_type, *args) + tool["Filename"] = path + + imprint_container(tool, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__) + + def switch(self, container, representation): + self.update(container, representation) + + def update(self, container, representation): + """Update Alembic path""" + + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + path = get_representation_path(representation) + + with comp_lock_and_undo_chunk(comp, "Update tool"): + tool["Filename"] = path + + # Update the imprinted representation + tool.SetData("avalon.representation", str(representation["_id"])) + + def remove(self, container): + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + with comp_lock_and_undo_chunk(comp, "Remove tool"): + tool.Delete() diff --git a/openpype/hosts/fusion/plugins/load/load_fbx.py b/openpype/hosts/fusion/plugins/load/load_fbx.py new file mode 100644 index 0000000000..70fe82ffef --- /dev/null +++ b/openpype/hosts/fusion/plugins/load/load_fbx.py @@ -0,0 +1,71 @@ + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.hosts.fusion.api import ( + imprint_container, + get_current_comp, + comp_lock_and_undo_chunk +) + + +class FusionLoadFBXMesh(load.LoaderPlugin): + """Load 
FBX mesh into Fusion""" + + families = ["*"] + representations = ["fbx"] + + label = "Load FBX mesh" + order = -10 + icon = "code-fork" + color = "orange" + + tool_type = "SurfaceFBXMesh" + + def load(self, context, name, namespace, data): + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Create the Loader with the filename path set + comp = get_current_comp() + with comp_lock_and_undo_chunk(comp, "Create tool"): + + path = self.fname + + args = (-32768, -32768) + tool = comp.AddTool(self.tool_type, *args) + tool["ImportFile"] = path + + imprint_container(tool, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__) + + def switch(self, container, representation): + self.update(container, representation) + + def update(self, container, representation): + """Update path""" + + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + path = get_representation_path(representation) + + with comp_lock_and_undo_chunk(comp, "Update tool"): + tool["ImportFile"] = path + + # Update the imprinted representation + tool.SetData("avalon.representation", str(representation["_id"])) + + def remove(self, container): + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + with comp_lock_and_undo_chunk(comp, "Remove tool"): + tool.Delete() diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index abd0f4e411..6f44c61d1b 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -101,6 +101,9 @@ def loader_shift(loader, frame, relative=True): else: shift = frame - old_in + if not shift: + return 0 + # Shifting global in will try to automatically compensate for the change # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those # input values to "just shift" the clip @@ -149,9 +152,8 @@ class FusionLoadSequence(load.LoaderPlugin): tool["Clip"] = path # Set global in point to start frame (if in version.data) - start = context["version"]["data"].get("frameStart", None) - if start is not None: - loader_shift(tool, start, relative=False) + start = self._get_start(context["version"], tool) + loader_shift(tool, start, relative=False) imprint_container(tool, name=name, @@ -214,12 +216,7 @@ class FusionLoadSequence(load.LoaderPlugin): # Get start frame from version data project_name = legacy_io.active_project() version = get_version_by_id(project_name, representation["parent"]) - start = version["data"].get("frameStart") - if start is None: - self.log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(tool.Name, representation)) - start = 0 + start = self._get_start(version, tool) with comp_lock_and_undo_chunk(comp, "Update Loader"): @@ -256,3 +253,27 @@ class FusionLoadSequence(load.LoaderPlugin): """Get first file in representation root""" files = sorted(os.listdir(root)) return os.path.join(root, files[0]) + + def _get_start(self, version_doc, tool): + """Return real start frame of published files (incl. 
handles)""" + data = version_doc["data"] + + # Get start frame directly with handle if it's in data + start = data.get("frameStartHandle") + if start is not None: + return start + + # Get frame start without handles + start = data.get("frameStart") + if start is None: + self.log.warning("Missing start frame for version " + "assuming starts at frame 0 for: " + "{}".format(tool.Name)) + return 0 + + # Use `handleStart` if the data is available + handle_start = data.get("handleStart") + if handle_start: + start -= handle_start + + return start diff --git a/openpype/hosts/fusion/plugins/publish/collect_inputs.py b/openpype/hosts/fusion/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..8f9857b02f --- /dev/null +++ b/openpype/hosts/fusion/plugins/publish/collect_inputs.py @@ -0,0 +1,114 @@ +from bson.objectid import ObjectId + +import pyblish.api + +from openpype.pipeline import registered_host + + +def collect_input_containers(tools): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + + # Lookup by node ids + lookup = frozenset([tool.Name for tool in tools]) + + containers = [] + host = registered_host() + for container in host.ls(): + + name = container["_tool"].Name + + # We currently assume no "groups" as containers but just single tools + # like a single "Loader" operator. As such we just check whether the + # Loader is part of the processing queue. + if name in lookup: + containers.append(container) + + return containers + + +def iter_upstream(tool): + """Yields all upstream inputs for the current tool. + + Yields: + tool: The input tools. + + """ + + def get_connected_input_tools(tool): + """Helper function that returns connected input tools for a tool.""" + inputs = [] + + # Filter only to actual types that will have sensible upstream + # connections. So we ignore just "Number" inputs as they can be + # many to iterate, slowing things down quite a bit - and in practice + # they don't have upstream connections. + VALID_INPUT_TYPES = ['Image', 'Particles', 'Mask', 'DataType3D'] + for type_ in VALID_INPUT_TYPES: + for input_ in tool.GetInputList(type_).values(): + output = input_.GetConnectedOutput() + if output: + input_tool = output.GetTool() + inputs.append(input_tool) + + return inputs + + # Initialize process queue with the node's inputs itself + queue = get_connected_input_tools(tool) + + # We keep track of which node names we have processed so far, to ensure we + # don't process the same hierarchy again. We are not pushing the tool + # itself into the set as that doesn't correctly recognize the same tool. + # Since tool names are unique in a comp in Fusion we rely on that. + collected = set(tool.Name for tool in queue) + + # Traverse upstream references for all nodes and yield them as we + # process the queue. + while queue: + upstream_tool = queue.pop() + yield upstream_tool + + # Find upstream tools that are not collected yet. + upstream_inputs = get_connected_input_tools(upstream_tool) + upstream_inputs = [t for t in upstream_inputs if + t.Name not in collected] + + queue.extend(upstream_inputs) + collected.update(tool.Name for tool in upstream_inputs) + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect source input containers used for this publish. 
+ + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.2 + hosts = ["fusion"] + + def process(self, instance): + + # Get all upstream and include itself + tool = instance[0] + nodes = list(iter_upstream(tool)) + nodes.append(tool) + + # Collect containers for the given set of nodes + containers = collect_input_containers(nodes) + + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs + + self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/fusion/plugins/publish/collect_instances.py b/openpype/hosts/fusion/plugins/publish/collect_instances.py index b2192d1dd9..fe60b83827 100644 --- a/openpype/hosts/fusion/plugins/publish/collect_instances.py +++ b/openpype/hosts/fusion/plugins/publish/collect_instances.py @@ -4,19 +4,21 @@ import pyblish.api def get_comp_render_range(comp): - """Return comp's start and end render range.""" + """Return comp's start-end render range and global start-end range.""" comp_attrs = comp.GetAttrs() start = comp_attrs["COMPN_RenderStart"] end = comp_attrs["COMPN_RenderEnd"] + global_start = comp_attrs["COMPN_GlobalStart"] + global_end = comp_attrs["COMPN_GlobalEnd"] # Whenever render ranges are undefined fall back # to the comp's global start and end if start == -1000000000: - start = comp_attrs["COMPN_GlobalEnd"] + start = global_start if end == -1000000000: - end = comp_attrs["COMPN_GlobalStart"] + end = global_end - return start, end + return start, end, global_start, global_end class CollectInstances(pyblish.api.ContextPlugin): @@ -42,9 +44,11 @@ class CollectInstances(pyblish.api.ContextPlugin): tools = comp.GetToolList(False).values() savers = [tool for tool in tools if tool.ID == "Saver"] - start, end = get_comp_render_range(comp) + start, end, global_start, global_end = get_comp_render_range(comp) context.data["frameStart"] = int(start) context.data["frameEnd"] = int(end) + context.data["frameStartHandle"] = int(global_start) + context.data["frameEndHandle"] = int(global_end) for tool in savers: path = tool["Clip"][comp.TIME_UNDEFINED] @@ -78,8 +82,10 @@ class CollectInstances(pyblish.api.ContextPlugin): "label": label, "frameStart": context.data["frameStart"], "frameEnd": context.data["frameEnd"], + "frameStartHandle": context.data["frameStartHandle"], + "frameEndHandle": context.data["frameStartHandle"], "fps": context.data["fps"], - "families": ["render", "review", "ftrack"], + "families": ["render", "review"], "family": "render", "active": active, "publish": active # backwards compatibility diff --git a/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py index 6483454d96..5c595638e9 100644 --- a/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py +++ b/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py @@ -17,9 +17,9 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): from openpype.lib import version_up - from openpype.action import get_errored_plugins_from_data + from openpype.pipeline.publish import get_errored_plugins_from_context - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) if any(plugin.__name__ == "FusionSubmitDeadline" for plugin in 
errored_plugins): raise RuntimeError("Skipping incrementing current file because " diff --git a/openpype/hosts/fusion/plugins/publish/render_local.py b/openpype/hosts/fusion/plugins/publish/render_local.py index 601c2ffccf..79e458b40a 100644 --- a/openpype/hosts/fusion/plugins/publish/render_local.py +++ b/openpype/hosts/fusion/plugins/publish/render_local.py @@ -20,6 +20,8 @@ class Fusionlocal(pyblish.api.InstancePlugin): def process(self, instance): + # This plug-in runs only once and thus assumes all instances + # currently will render the same frame range context = instance.context key = "__hasRun{}".format(self.__class__.__name__) if context.data.get(key, False): @@ -28,8 +30,8 @@ class Fusionlocal(pyblish.api.InstancePlugin): context.data[key] = True current_comp = context.data["currentComp"] - frame_start = current_comp.GetAttrs("COMPN_RenderStart") - frame_end = current_comp.GetAttrs("COMPN_RenderEnd") + frame_start = context.data["frameStartHandle"] + frame_end = context.data["frameEndHandle"] path = instance.data["path"] output_dir = instance.data["outputDir"] @@ -40,7 +42,11 @@ class Fusionlocal(pyblish.api.InstancePlugin): self.log.info("End frame: {}".format(frame_end)) with comp_lock_and_undo_chunk(current_comp): - result = current_comp.Render() + result = current_comp.Render({ + "Start": frame_start, + "End": frame_end, + "Wait": True + }) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/fusion/plugins/publish/validate_background_depth.py b/openpype/hosts/fusion/plugins/publish/validate_background_depth.py index a0734d8278..4268fab528 100644 --- a/openpype/hosts/fusion/plugins/publish/validate_background_depth.py +++ b/openpype/hosts/fusion/plugins/publish/validate_background_depth.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype import action +from openpype.pipeline.publish import RepairAction class ValidateBackgroundDepth(pyblish.api.InstancePlugin): @@ -8,7 +8,7 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder label = "Validate Background Depth 32 bit" - actions = [action.RepairAction] + actions = [RepairAction] hosts = ["fusion"] families = ["render"] optional = True diff --git a/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py b/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py index 45ed53f65c..f6beefefc1 100644 --- a/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py +++ b/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype import action +from openpype.pipeline.publish import RepairAction class ValidateCreateFolderChecked(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - actions = [action.RepairAction] + actions = [RepairAction] label = "Validate Create Folder Checked" families = ["render"] hosts = ["fusion"] diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py deleted file mode 100644 index 49ef340679..0000000000 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ /dev/null @@ -1,284 +0,0 @@ -import os -import re -import sys -import logging - -from openpype.client import ( - get_asset_by_name, - get_versions, -) -from openpype.pipeline import ( - legacy_io, - install_host, - registered_host, -) -from openpype.lib import version_up -from 
openpype.hosts.fusion import api -from openpype.hosts.fusion.api import lib -from openpype.pipeline.context_tools import get_workdir_from_session - -log = logging.getLogger("Update Slap Comp") - - -def _format_version_folder(folder): - """Format a version folder based on the filepath - - Assumption here is made that, if the path does not exists the folder - will be "v001" - - Args: - folder: file path to a folder - - Returns: - str: new version folder name - """ - - new_version = 1 - if os.path.isdir(folder): - re_version = re.compile(r"v\d+$") - versions = [i for i in os.listdir(folder) if os.path.isdir(i) - and re_version.match(i)] - if versions: - # ensure the "v" is not included - new_version = int(max(versions)[1:]) + 1 - - version_folder = "v{:03d}".format(new_version) - - return version_folder - - -def _get_fusion_instance(): - fusion = getattr(sys.modules["__main__"], "fusion", None) - if fusion is None: - try: - # Support for FuScript.exe, BlackmagicFusion module for py2 only - import BlackmagicFusion as bmf - fusion = bmf.scriptapp("Fusion") - except ImportError: - raise RuntimeError("Could not find a Fusion instance") - return fusion - - -def _format_filepath(session): - - project = session["AVALON_PROJECT"] - asset = session["AVALON_ASSET"] - - # Save updated slap comp - work_path = get_workdir_from_session(session) - walk_to_dir = os.path.join(work_path, "scenes", "slapcomp") - slapcomp_dir = os.path.abspath(walk_to_dir) - - # Ensure destination exists - if not os.path.isdir(slapcomp_dir): - log.warning("Folder did not exist, creating folder structure") - os.makedirs(slapcomp_dir) - - # Compute output path - new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset) - new_filepath = os.path.join(slapcomp_dir, new_filename) - - # Create new unique filepath - if os.path.exists(new_filepath): - new_filepath = version_up(new_filepath) - - return new_filepath - - -def _update_savers(comp, session): - """Update all savers of the current comp to ensure the output is correct - - This will refactor the Saver file outputs to the renders of the new session - that is provided. - - In the case the original saver path had a path set relative to a /fusion/ - folder then that relative path will be matched with the exception of all - "version" (e.g. v010) references will be reset to v001. Otherwise only a - version folder will be computed in the new session's work "render" folder - to dump the files in and keeping the original filenames. - - Args: - comp (object): current comp instance - session (dict): the current Avalon session - - Returns: - None - """ - - new_work = get_workdir_from_session(session) - renders = os.path.join(new_work, "renders") - version_folder = _format_version_folder(renders) - renders_version = os.path.join(renders, version_folder) - - comp.Print("New renders to: %s\n" % renders) - - with api.comp_lock_and_undo_chunk(comp): - savers = comp.GetToolList(False, "Saver").values() - for saver in savers: - filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0] - - # Get old relative path to the "fusion" app folder so we can apply - # the same relative path afterwards. If not found fall back to - # using just a version folder with the filename in it. - # todo: can we make this less magical? 
- relpath = filepath.replace("\\", "/").rsplit("/fusion/", 1)[-1] - - if os.path.isabs(relpath): - # If not relative to a "/fusion/" folder then just use filename - filename = os.path.basename(filepath) - log.warning("Can't parse relative path, refactoring to only" - "filename in a version folder: %s" % filename) - new_path = os.path.join(renders_version, filename) - - else: - # Else reuse the relative path - # Reset version in folder and filename in the relative path - # to v001. The version should be is only detected when prefixed - # with either `_v` (underscore) or `/v` (folder) - version_pattern = r"(/|_)v[0-9]+" - if re.search(version_pattern, relpath): - new_relpath = re.sub(version_pattern, - r"\1v001", - relpath) - log.info("Resetting version folders to v001: " - "%s -> %s" % (relpath, new_relpath)) - relpath = new_relpath - - new_path = os.path.join(new_work, relpath) - - saver["Clip"] = new_path - - -def update_frame_range(comp, representations): - """Update the frame range of the comp and render length - - The start and end frame are based on the lowest start frame and the highest - end frame - - Args: - comp (object): current focused comp - representations (list) collection of dicts - - Returns: - None - - """ - - project_name = legacy_io.active_project() - version_ids = {r["parent"] for r in representations} - versions = list(get_versions(project_name, version_ids)) - - versions = [v for v in versions - if v["data"].get("frameStart", None) is not None] - - if not versions: - log.warning("No versions loaded to match frame range to.\n") - return - - start = min(v["data"]["frameStart"] for v in versions) - end = max(v["data"]["frameEnd"] for v in versions) - - lib.update_frame_range(start, end, comp=comp) - - -def switch(asset_name, filepath=None, new=True): - """Switch the current containers of the file to the other asset (shot) - - Args: - filepath (str): file path of the comp file - asset_name (str): name of the asset (shot) - new (bool): Save updated comp under a different name - - Returns: - comp path (str): new filepath of the updated comp - - """ - - # If filepath provided, ensure it is valid absolute path - if filepath is not None: - if not os.path.isabs(filepath): - filepath = os.path.abspath(filepath) - - assert os.path.exists(filepath), "%s must exist " % filepath - - # Assert asset name exists - # It is better to do this here then to wait till switch_shot does it - project_name = legacy_io.active_project() - asset = get_asset_by_name(project_name, asset_name) - assert asset, "Could not find '%s' in the database" % asset_name - - # Go to comp - if not filepath: - current_comp = api.get_current_comp() - assert current_comp is not None, "Could not find current comp" - else: - fusion = _get_fusion_instance() - current_comp = fusion.LoadComp(filepath, quiet=True) - assert current_comp is not None, ( - "Fusion could not load '{}'").format(filepath) - - host = registered_host() - containers = list(host.ls()) - assert containers, "Nothing to update" - - representations = [] - for container in containers: - try: - representation = lib.switch_item( - container, - asset_name=asset_name) - representations.append(representation) - except Exception as e: - current_comp.Print("Error in switching! 
%s\n" % e.message) - - message = "Switched %i Loaders of the %i\n" % (len(representations), - len(containers)) - current_comp.Print(message) - - # Build the session to switch to - switch_to_session = legacy_io.Session.copy() - switch_to_session["AVALON_ASSET"] = asset['name'] - - if new: - comp_path = _format_filepath(switch_to_session) - - # Update savers output based on new session - _update_savers(current_comp, switch_to_session) - else: - comp_path = version_up(filepath) - - current_comp.Print(comp_path) - - current_comp.Print("\nUpdating frame range") - update_frame_range(current_comp, representations) - - current_comp.Save(comp_path) - - return comp_path - - -if __name__ == '__main__': - - # QUESTION: can we convert this to gui rather then standalone script? - # TODO: convert to gui tool - import argparse - - parser = argparse.ArgumentParser(description="Switch to a shot within an" - "existing comp file") - - parser.add_argument("--file_path", - type=str, - default=True, - help="File path of the comp to use") - - parser.add_argument("--asset_name", - type=str, - default=True, - help="Name of the asset (shot) to switch") - - args, unknown = parser.parse_args() - - install_host(api) - switch(args.asset_name, args.file_path) - - sys.exit(0) diff --git a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py deleted file mode 100644 index de8fc4b3b4..0000000000 --- a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -import sys - -from openpype.api import Logger -from openpype.pipeline import ( - install_host, - registered_host, -) - -log = Logger().get_logger(__name__) - - -def main(env): - from openpype.hosts.fusion import api - from openpype.hosts.fusion.api import menu - - # activate resolve from pype - install_host(api) - - log.info(f"Registered host: {registered_host()}") - - menu.launch_openpype_menu() - - -if __name__ == "__main__": - result = main(os.environ) - sys.exit(not bool(result)) diff --git a/openpype/hosts/harmony/__init__.py b/openpype/hosts/harmony/__init__.py index d2f710d83d..9177eaa285 100644 --- a/openpype/hosts/harmony/__init__.py +++ b/openpype/hosts/harmony/__init__.py @@ -1,11 +1,10 @@ -import os +from .addon import ( + HARMONY_HOST_DIR, + HarmonyAddon, +) -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - openharmony_path = os.path.join( - os.environ["OPENPYPE_REPOS_ROOT"], "openpype", "hosts", - "harmony", "vendor", "OpenHarmony" - ) - # TODO check if is already set? What to do if is already set? - env["LIB_OPENHARMONY_PATH"] = openharmony_path +__all__ = ( + "HARMONY_HOST_DIR", + "HarmonyAddon", +) diff --git a/openpype/hosts/harmony/addon.py b/openpype/hosts/harmony/addon.py new file mode 100644 index 0000000000..872a7490b5 --- /dev/null +++ b/openpype/hosts/harmony/addon.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HarmonyAddon(OpenPypeModule, IHostAddon): + name = "harmony" + host_name = "harmony" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + openharmony_path = os.path.join( + HARMONY_HOST_DIR, "vendor", "OpenHarmony" + ) + # TODO check if is already set? 
What to do if is already set? + env["LIB_OPENHARMONY_PATH"] = openharmony_path + + def get_workfile_extensions(self): + return [".zip"] diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 4d71b9380d..4b9849c190 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -14,14 +14,14 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import get_outdated_containers from openpype.pipeline.context_tools import get_current_project_asset -import openpype.hosts.harmony + +from openpype.hosts.harmony import HARMONY_HOST_DIR import openpype.hosts.harmony.api as harmony log = logging.getLogger("openpype.hosts.harmony") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.harmony.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") diff --git a/openpype/hosts/harmony/api/workio.py b/openpype/hosts/harmony/api/workio.py index ab1cb9b1a9..8df5ede917 100644 --- a/openpype/hosts/harmony/api/workio.py +++ b/openpype/hosts/harmony/api/workio.py @@ -2,8 +2,6 @@ import os import shutil -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - from .lib import ( ProcessContext, get_local_harmony_path, @@ -16,7 +14,7 @@ save_disabled = False def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["harmony"] + return [".zip"] def has_unsaved_changes(): diff --git a/openpype/hosts/harmony/plugins/publish/collect_workfile.py b/openpype/hosts/harmony/plugins/publish/collect_workfile.py index c0493315a4..3624147435 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/collect_workfile.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- """Collect current workfile from Harmony.""" -import pyblish.api import os +import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -17,13 +17,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin): """Plugin entry point.""" family = "workfile" basename = os.path.basename(context.data["currentFile"]) - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, "", context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) # Create instance diff --git a/openpype/hosts/harmony/plugins/publish/extract_palette.py b/openpype/hosts/harmony/plugins/publish/extract_palette.py index fae778f6b0..69c6e098ff 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_palette.py +++ b/openpype/hosts/harmony/plugins/publish/extract_palette.py @@ -6,10 +6,10 @@ import csv from PIL import Image, ImageDraw, ImageFont import openpype.hosts.harmony.api as harmony -import openpype.api +from openpype.pipeline import publish -class ExtractPalette(openpype.api.Extractor): +class ExtractPalette(publish.Extractor): """Extract palette.""" label = "Extract Palette" diff --git a/openpype/hosts/harmony/plugins/publish/extract_template.py b/openpype/hosts/harmony/plugins/publish/extract_template.py index d25b07bba3..458bf25a3c 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_template.py +++ 
b/openpype/hosts/harmony/plugins/publish/extract_template.py @@ -3,12 +3,11 @@ import os import shutil -import openpype.api +from openpype.pipeline import publish import openpype.hosts.harmony.api as harmony -import openpype.hosts.harmony -class ExtractTemplate(openpype.api.Extractor): +class ExtractTemplate(publish.Extractor): """Extract the connected nodes to the composite instance.""" label = "Extract Template" @@ -50,7 +49,7 @@ class ExtractTemplate(openpype.api.Extractor): dependencies.remove(instance.data["setMembers"][0]) # Export template. - openpype.hosts.harmony.api.export_template( + harmony.export_template( unique_backdrops, dependencies, filepath ) diff --git a/openpype/hosts/harmony/plugins/publish/extract_workfile.py b/openpype/hosts/harmony/plugins/publish/extract_workfile.py index 7f25ec8150..9bb3090558 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/extract_workfile.py @@ -4,10 +4,10 @@ import os import shutil from zipfile import ZipFile -import openpype.api +from openpype.pipeline import publish -class ExtractWorkfile(openpype.api.Extractor): +class ExtractWorkfile(publish.Extractor): """Extract and zip complete workfile folder into zip.""" label = "Extract Workfile" diff --git a/openpype/hosts/harmony/plugins/publish/increment_workfile.py b/openpype/hosts/harmony/plugins/publish/increment_workfile.py index 417377fff8..1caf581567 100644 --- a/openpype/hosts/harmony/plugins/publish/increment_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/increment_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.action import get_errored_plugins_from_data +from openpype.pipeline.publish import get_errored_plugins_from_context from openpype.lib import version_up import openpype.hosts.harmony.api as harmony @@ -19,7 +19,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin): optional = True def process(self, instance): - errored_plugins = get_errored_plugins_from_data(instance.context) + errored_plugins = get_errored_plugins_from_context(instance.context) if errored_plugins: raise RuntimeError( "Skipping incrementing current file because publishing failed." 
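# Editor's note: the import moves above all point publish plugins at
# openpype.pipeline.publish instead of the removed openpype.action and
# openpype.api modules, and the new HarmonyAddon shows hosts migrating to
# addon classes. Below is a minimal sketch of that addon pattern, assuming
# OpenPypeModule and IHostAddon keep the interface shown in this diff; the
# "myhost" name, MYHOST_PLUGIN_PATH variable and ".mywork" extension are
# hypothetical placeholders, not part of the patch.

import os

from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostAddon

# Root of the hypothetical host implementation package
MYHOST_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


class MyHostAddon(OpenPypeModule, IHostAddon):
    name = "myhost"
    host_name = "myhost"

    def initialize(self, module_settings):
        # Host addons are enabled unconditionally, as in HarmonyAddon above
        self.enabled = True

    def add_implementation_envs(self, env, _app):
        # Paths derive from the addon's own location rather than
        # OPENPYPE_REPOS_ROOT, mirroring the HARMONY_HOST_DIR change above
        env["MYHOST_PLUGIN_PATH"] = os.path.join(MYHOST_ROOT_DIR, "startup")

    def get_workfile_extensions(self):
        # Replaces lookups in the removed HOST_WORKFILE_EXTENSIONS mapping
        return [".mywork"]
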
diff --git a/openpype/hosts/harmony/plugins/publish/validate_instances.py b/openpype/hosts/harmony/plugins/publish/validate_instances.py index 373ef94cc3..ac367082ef 100644 --- a/openpype/hosts/harmony/plugins/publish/validate_instances.py +++ b/openpype/hosts/harmony/plugins/publish/validate_instances.py @@ -1,9 +1,12 @@ import os import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + import openpype.hosts.harmony.api as harmony +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateInstanceRepair(pyblish.api.Action): @@ -37,7 +40,7 @@ class ValidateInstance(pyblish.api.InstancePlugin): label = "Validate Instance" hosts = ["harmony"] actions = [ValidateInstanceRepair] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): instance_asset = instance.data["asset"] diff --git a/openpype/hosts/hiero/__init__.py b/openpype/hosts/hiero/__init__.py index d2ac82391b..e6744d5aec 100644 --- a/openpype/hosts/hiero/__init__.py +++ b/openpype/hosts/hiero/__init__.py @@ -1,41 +1,10 @@ -import os -import platform +from .addon import ( + HIERO_ROOT_DIR, + HieroAddon, +) -def add_implementation_envs(env, _app): - # Add requirements to HIERO_PLUGIN_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_hiero_paths = [ - os.path.join(pype_root, "openpype", "hosts", "hiero", "api", "startup") - ] - old_hiero_path = env.get("HIERO_PLUGIN_PATH") or "" - for path in old_hiero_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_hiero_paths: - new_hiero_paths.append(norm_path) - - env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "HIERO_ROOT_DIR", + "HieroAddon", +) diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py new file mode 100644 index 0000000000..3523e9aed7 --- /dev/null +++ b/openpype/hosts/hiero/addon.py @@ -0,0 +1,63 @@ +import os +import platform +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HieroAddon(OpenPypeModule, IHostAddon): + name = "hiero" + host_name = "hiero" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to HIERO_PLUGIN_PATH + new_hiero_paths = [ + os.path.join(HIERO_ROOT_DIR, "api", "startup") + ] + old_hiero_path = env.get("HIERO_PLUGIN_PATH") or "" + for path in old_hiero_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_hiero_paths: + new_hiero_paths.append(norm_path) + + env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Add vendor to PYTHONPATH + python_path = env["PYTHONPATH"] + python_path_parts = 
[] + if python_path: + python_path_parts = python_path.split(os.pathsep) + vendor_path = os.path.join(HIERO_ROOT_DIR, "vendor") + python_path_parts.insert(0, vendor_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + # Try to add QuickTime to PATH + quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" + if platform.system() == "windows" and os.path.exists(quick_time_path): + path_value = env.get("PATH") or "" + path_paths = [ + path + for path in path_value.split(os.pathsep) + if path + ] + path_paths.append(quick_time_path) + env["PATH"] = os.pathsep.join(path_paths) + + def get_workfile_extensions(self): + return [".hrox"] diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index 59fd278a81..862a2607c1 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -1,7 +1,6 @@ import os import hiero.core.events -from openpype.api import Logger -from openpype.lib import register_event_callback +from openpype.lib import Logger, register_event_callback from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, @@ -11,7 +10,7 @@ from .lib import ( from .tags import add_tags_to_workfile from .menu import update_menu_task_label -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def startupCompleted(event): diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 2f66f3ddd7..e5d35945af 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -13,15 +13,11 @@ import hiero from Qt import QtWidgets -from openpype.client import ( - get_project, - get_versions, - get_last_versions, - get_representations, -) -from openpype.settings import get_anatomy_settings +from openpype.client import get_project +from openpype.settings import get_project_settings from openpype.pipeline import legacy_io, Anatomy -from openpype.api import Logger +from openpype.pipeline.load import filter_containers +from openpype.lib import Logger from . import tags try: @@ -34,7 +30,7 @@ except ImportError: # from opentimelineio import opentime # from pprint import pformat -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self._has_been_setup = False @@ -882,8 +878,7 @@ def apply_colorspace_project(): project.close() # get presets for hiero - imageio = get_anatomy_settings( - project_name)["imageio"].get("hiero", None) + imageio = get_project_settings(project_name)["hiero"]["imageio"] presets = imageio.get("workfile") # save the workfile as subversion "comment:_colorspaceChange" @@ -936,8 +931,7 @@ def apply_colorspace_clips(): clips = project.clips() # get presets for hiero - imageio = get_anatomy_settings( - project_name)["imageio"].get("hiero", None) + imageio = get_project_settings(project_name)["hiero"]["imageio"] from pprint import pprint presets = imageio.get("regexInputs", {}).get("inputs", {}) @@ -1055,6 +1049,10 @@ def sync_clip_name_to_data_asset(track_items_list): print("asset was changed in clip: {}".format(ti_name)) +def set_track_color(track_item, color): + track_item.source().binItem().setColor(color) + + def check_inventory_versions(track_items=None): """ Actual version color idetifier of Loaded containers @@ -1066,68 +1064,29 @@ def check_inventory_versions(track_items=None): """ from . 
import parse_container
 
-    track_item = track_items or get_track_items()
+    track_items = track_items or get_track_items()
     # presets
     clip_color_last = "green"
     clip_color = "red"
 
-    item_with_repre_id = []
-    repre_ids = set()
+    containers = []
     # Find all containers and collect it's node and representation ids
-    for track_item in track_item:
+    for track_item in track_items:
         container = parse_container(track_item)
         if container:
-            repre_id = container["representation"]
-            repre_ids.add(repre_id)
-            item_with_repre_id.append((track_item, repre_id))
+            containers.append(container)
 
     # Skip if nothing was found
-    if not repre_ids:
+    if not containers:
         return
 
     project_name = legacy_io.active_project()
-    # Find representations based on found containers
-    repre_docs = get_representations(
-        project_name,
-        repre_ids=repre_ids,
-        fields=["_id", "parent"]
-    )
-    # Store representations by id and collect version ids
-    repre_docs_by_id = {}
-    version_ids = set()
-    for repre_doc in repre_docs:
-        # Use stringed representation id to match value in containers
-        repre_id = str(repre_doc["_id"])
-        repre_docs_by_id[repre_id] = repre_doc
-        version_ids.add(repre_doc["parent"])
+    filter_result = filter_containers(containers, project_name)
+    for container in filter_result.latest:
+        set_track_color(container["_track_item"], clip_color_last)
 
-    version_docs = get_versions(
-        project_name, version_ids, fields=["_id", "name", "parent"]
-    )
-    # Store versions by id and collect subset ids
-    version_docs_by_id = {}
-    subset_ids = set()
-    for version_doc in version_docs:
-        version_docs_by_id[version_doc["_id"]] = version_doc
-        subset_ids.add(version_doc["parent"])
-
-    # Query last versions based on subset ids
-    last_versions_by_subset_id = get_last_versions(
-        project_name, subset_ids=subset_ids, fields=["_id", "parent"]
-    )
-
-    for item in item_with_repre_id:
-        # Some python versions of nuke can't unfold tuple in for loop
-        track_item, repre_id = item
-
-        repre_doc = repre_docs_by_id[repre_id]
-        version_doc = version_docs_by_id[repre_doc["parent"]]
-        last_version_doc = last_versions_by_subset_id[version_doc["parent"]]
-        # Check if last version is same as current version
-        if version_doc["_id"] == last_version_doc["_id"]:
-            track_item.source().binItem().setColor(clip_color_last)
-        else:
-            track_item.source().binItem().setColor(clip_color)
+    for container in filter_result.outdated:
+        set_track_color(container["_track_item"], clip_color)
 
 
 def selection_changed_timeline(event):
diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py
index 541a1f1f92..2a7560c6ba 100644
--- a/openpype/hosts/hiero/api/menu.py
+++ b/openpype/hosts/hiero/api/menu.py
@@ -4,7 +4,7 @@ import sys
 
 import hiero.core
 from hiero.ui import findMenuAction
 
-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools
 
diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py
index b243a38b06..ea61dc4785 100644
--- a/openpype/hosts/hiero/api/pipeline.py
+++ b/openpype/hosts/hiero/api/pipeline.py
@@ -6,7 +6,7 @@ import contextlib
 from collections import OrderedDict
 
 from pyblish import api as pyblish
-from openpype.api import Logger
+from openpype.lib import Logger
 from openpype.pipeline import (
     schema,
     register_creator_plugin_path,
@@ -18,7 +18,7 @@ from openpype.pipeline import (
 from openpype.tools.utils import host_tools
 
 from .
import lib, menu, events -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) # plugin paths API_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -251,7 +251,6 @@ def reload_config(): import importlib for module in ( - "openpype.api", "openpype.hosts.hiero.lib", "openpype.hosts.hiero.menu", "openpype.hosts.hiero.tags" diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index 28a9dfb492..ea8a9e836a 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -8,12 +8,13 @@ import hiero from Qt import QtWidgets, QtCore import qargparse -import openpype.api as openpype +from openpype.settings import get_current_project_settings +from openpype.lib import Logger from openpype.pipeline import LoaderPlugin, LegacyCreator from openpype.pipeline.context_tools import get_current_project_asset from . import lib -log = openpype.Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def load_stylesheet(): @@ -605,7 +606,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) import openpype.hosts.hiero.api as phiero - self.presets = openpype.get_current_project_settings()[ + self.presets = get_current_project_settings()[ "hiero"]["create"].get(self.__class__.__name__, {}) # adding basic current context resolve objects diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index 10df96fa53..fac26da03a 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -3,7 +3,7 @@ import os import hiero from openpype.client import get_project, get_assets -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import legacy_io log = Logger.get_logger(__name__) diff --git a/openpype/hosts/hiero/api/workio.py b/openpype/hosts/hiero/api/workio.py index 394cb5e2ab..040fd1435a 100644 --- a/openpype/hosts/hiero/api/workio.py +++ b/openpype/hosts/hiero/api/workio.py @@ -1,14 +1,13 @@ import os import hiero -from openpype.api import Logger -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS +from openpype.lib import Logger log = Logger.get_logger(__name__) def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["hiero"] + return [".hrox"] def has_unsaved_changes(): diff --git a/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py b/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py index 5b0aa270a7..7fb381ff7e 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py @@ -2,10 +2,11 @@ import os import json import pyblish.api -import openpype + +from openpype.pipeline import publish -class ExtractClipEffects(openpype.api.Extractor): +class ExtractClipEffects(publish.Extractor): """Extract clip effects instances.""" order = pyblish.api.ExtractorOrder diff --git a/openpype/hosts/hiero/plugins/publish/extract_frames.py b/openpype/hosts/hiero/plugins/publish/extract_frames.py index aa3eda2e9f..f865d2fb39 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_frames.py +++ b/openpype/hosts/hiero/plugins/publish/extract_frames.py @@ -1,9 +1,14 @@ import os import pyblish.api -import openpype + +from openpype.lib import ( + get_oiio_tools_path, + run_subprocess, +) +from openpype.pipeline import publish -class ExtractFrames(openpype.api.Extractor): +class ExtractFrames(publish.Extractor): """Extracts frames""" order = pyblish.api.ExtractorOrder @@ -13,7 +18,7 @@ class 
ExtractFrames(openpype.api.Extractor): movie_extensions = ["mov", "mp4"] def process(self, instance): - oiio_tool_path = openpype.lib.get_oiio_tools_path() + oiio_tool_path = get_oiio_tools_path() staging_dir = self.staging_dir(instance) output_template = os.path.join(staging_dir, instance.data["name"]) sequence = instance.context.data["activeTimeline"] @@ -43,7 +48,7 @@ class ExtractFrames(openpype.api.Extractor): args.extend(["--powc", "0.45,0.45,0.45,1.0"]) args.extend([input_path, "-o", output_path]) - output = openpype.api.run_subprocess(args) + output = run_subprocess(args) failed_output = "oiiotool produced no output." if failed_output in output: diff --git a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py index d12e7665bf..e64aa89b26 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py @@ -1,9 +1,10 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish -class ExtractThumnail(openpype.api.Extractor): +class ExtractThumnail(publish.Extractor): """ Extractor for track item's tumnails """ diff --git a/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py b/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py index 934e7112fa..6ccbe955f2 100644 --- a/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py +++ b/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py @@ -1,5 +1,6 @@ from pyblish import api -import openpype.api as pype + +from openpype.lib import version_up class IntegrateVersionUpWorkfile(api.ContextPlugin): @@ -15,7 +16,7 @@ class IntegrateVersionUpWorkfile(api.ContextPlugin): def process(self, context): project = context.data["activeProject"] path = context.data.get("currentFile") - new_path = pype.version_up(path) + new_path = version_up(path) if project: project.saveAs(new_path) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 0c7dbc1f22..84f2927fc7 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -318,10 +318,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin): @staticmethod def create_otio_time_range_from_timeline_item_data(track_item): - speed = track_item.playbackSpeed() timeline = phiero.get_current_sequence() frame_start = int(track_item.timelineIn()) - frame_duration = int((track_item.duration() - 1) / speed) + frame_duration = int(track_item.duration()) fps = timeline.framerate().toFloat() return hiero_export.create_otio_time_range( diff --git a/openpype/hosts/hiero/vendor/google/protobuf/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/__init__.py new file mode 100644 index 0000000000..03f3b29ee7 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. + +__version__ = '3.20.1' diff --git a/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py new file mode 100644 index 0000000000..9121193d11 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/any.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _ANY._serialized_start=46 + _ANY._serialized_end=84 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py new file mode 100644 index 0000000000..1721b10a75 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/api.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _API._serialized_start=113 + _API._serialized_end=370 + _METHOD._serialized_start=373 + _METHOD._serialized_end=586 + _MIXIN._serialized_start=588 + _MIXIN._serialized_end=623 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000..715a891370 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/compiler/plugin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' + _VERSION._serialized_start=101 + _VERSION._serialized_end=171 + _CODEGENERATORREQUEST._serialized_start=174 + _CODEGENERATORREQUEST._serialized_end=360 + _CODEGENERATORRESPONSE._serialized_start=363 + _CODEGENERATORRESPONSE._serialized_end=684 + _CODEGENERATORRESPONSE_FILE._serialized_start=499 + _CODEGENERATORRESPONSE_FILE._serialized_end=626 + _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 + _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py new file mode 100644 index 0000000000..ad70be9a11 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py @@ -0,0 +1,1224 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import threading +import warnings + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import binascii + import os + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = True + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. + DescriptorMetaclass = type + + +class _Lock(object): + """Wrapper class of threading.Lock(), which is allowed by 'with'.""" + + def __new__(cls): + self = object.__new__(cls) + self._lock = threading.Lock() # pylint: disable=protected-access + return self + + def __enter__(self): + self._lock.acquire() + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + +_lock = threading.Lock() + + +def _Deprecated(name): + if _Deprecated.count > 0: + _Deprecated.count -= 1 + warnings.warn( + 'Call to deprecated create function %s(). Note: Create unlinked ' + 'descriptors is going to go away. Please use get/find descriptors from ' + 'generated code or query the descriptor_pool.' + % name, + category=DeprecationWarning, stacklevel=3) + + +# Deprecated warnings will print 100 times at most which should be enough for +# users to notice and do not cause timeout. +_Deprecated.count = 100 + + +_internal_create_key = object() + + +class DescriptorBase(metaclass=DescriptorMetaclass): + + """Descriptors base class. + + This class is the base of all descriptor classes. 
It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, serialized_options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + self._serialized_options = serialized_options + + # Does this descriptor have non-default options? + self.has_options = (options is not None) or (serialized_options is not None) + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, + self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + + with _lock: + if self._serialized_options is None: + self._options = options_class() + else: + self._options = _ParseOptions(options_class(), + self._serialized_options) + + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None, serialized_options=None): + """Constructor. + + Args: + options: Protocol message options or None + to use default message options. + options_class_name (str): The class name of the above options. + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file (FileDescriptor): Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. + serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_options: Protocol message serialized options or None. + """ + super(_NestedDescriptorBase, self).__init__( + options, serialized_options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? 
+ self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldn't be serialized, due to to few constructor + arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + Attributes: + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + containing_type (Descriptor): Reference to the descriptor of the type + containing us, or None if this is top-level. + fields (list[FieldDescriptor]): Field descriptors for all fields in + this type. + fields_by_number (dict(int, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed + by "number" attribute in each FieldDescriptor. + fields_by_name (dict(str, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by + "name" attribute in each :class:`FieldDescriptor`. + nested_types (list[Descriptor]): Descriptor references + for all protocol message types nested within this one. + nested_types_by_name (dict(str, Descriptor)): Same Descriptor + objects as in :attr:`nested_types`, but indexed by "name" attribute + in each Descriptor. + enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references + for all enums contained within this type. + enum_types_by_name (dict(str, EnumDescriptor)): Same + :class:`EnumDescriptor` objects as in :attr:`enum_types`, but + indexed by "name" attribute in each EnumDescriptor. + enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping + from enum value name to :class:`EnumValueDescriptor` for that value. + extensions (list[FieldDescriptor]): All extensions defined directly + within this message type (NOT within a nested type). + extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor + objects as :attr:`extensions`, but indexed by "name" attribute of each + FieldDescriptor. + is_extendable (bool): Does this type define any extension ranges? + oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields + in this message. + oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in + :attr:`oneofs`, but indexed by "name" attribute. + file (FileDescriptor): Reference to file descriptor. 
+ + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__( + cls, + name=None, + full_name=None, + filename=None, + containing_type=None, + fields=None, + nested_types=None, + enum_types=None, + extensions=None, + options=None, + serialized_options=None, + is_extendable=True, + extension_ranges=None, + oneofs=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + syntax=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + serialized_options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin + syntax=None, create_key=None): + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('Descriptor') + + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... + self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + """Same FieldDescriptor objects as in :attr:`fields`, but indexed by + :attr:`FieldDescriptor.camelcase_name`. + """ + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. 
+ + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. + """ + # This function is overridden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + Attributes: + name (str): Name of this field, exactly as it appears in .proto. + full_name (str): Name of this field, including containing scope. This is + particularly relevant for extensions. + index (int): Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number (int): Tag number declared for this field in the .proto file. + + type (int): (One of the TYPE_* constants below) Declared type. + cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label (int): (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. + has_default_value (bool): True if this field has a default value defined, + otherwise false. + default_value (Varies): Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type (Descriptor): Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. + Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type (Descriptor): If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type (EnumDescriptor): If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope (Descriptor): Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options (descriptor_pb2.FieldOptions): Protocol message field options or + None to use default field options. + + containing_oneof (OneofDescriptor): If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + + file (FileDescriptor): Reference to file descriptor. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
+ TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. 
+ """ + if create_key is not _internal_create_key: + _Deprecated('FieldDescriptor') + + super(FieldDescriptor, self).__init__( + options, serialized_options, 'FieldOptions') + self.name = name + self.full_name = full_name + self.file = file + self._camelcase_name = None + if json_name is None: + self.json_name = _ToJsonName(name) + else: + self.json_name = json_name + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + """Camelcase name of this field. + + Returns: + str: the name in CamelCase. + """ + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @property + def has_presence(self): + """Whether the field distinguishes between unpopulated and default values. + + Raises: + RuntimeError: singular field that is not linked with message nor file. + """ + if self.label == FieldDescriptor.LABEL_REPEATED: + return False + if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or + self.containing_oneof): + return True + if hasattr(self.file, 'syntax'): + return self.file.syntax == 'proto2' + if hasattr(self.message_type, 'syntax'): + return self.message_type.syntax == 'proto2' + raise RuntimeError( + 'has_presence is not ready to use because field %s is not' + ' linked with message type nor file' % self.full_name) + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. + """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + Attributes: + name (str): Name of the enum type. + full_name (str): Full name of the type, including package name + and any enclosing type(s). + + values (list[EnumValueDescriptor]): List of the values + in this enum. + values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type (Descriptor): Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file (FileDescriptor): Reference to file descriptor. 
+    options (descriptor_pb2.EnumOptions): Enum options message or
+      None to use default enum options.
+  """
+
+  if _USE_C_DESCRIPTORS:
+    _C_DESCRIPTOR_CLASS = _message.EnumDescriptor
+
+    def __new__(cls, name, full_name, filename, values,
+                containing_type=None, options=None,
+                serialized_options=None, file=None,  # pylint: disable=redefined-builtin
+                serialized_start=None, serialized_end=None, create_key=None):
+      _message.Message._CheckCalledFromGeneratedFile()
+      return _message.default_pool.FindEnumTypeByName(full_name)
+
+  def __init__(self, name, full_name, filename, values,
+               containing_type=None, options=None,
+               serialized_options=None, file=None,  # pylint: disable=redefined-builtin
+               serialized_start=None, serialized_end=None, create_key=None):
+    """Arguments are as described in the attribute description above.
+
+    Note that filename is an obsolete argument that is no longer used.
+    Please use file.name to access this as an attribute.
+    """
+    if create_key is not _internal_create_key:
+      _Deprecated('EnumDescriptor')
+
+    super(EnumDescriptor, self).__init__(
+        options, 'EnumOptions', name, full_name, file,
+        containing_type, serialized_start=serialized_start,
+        serialized_end=serialized_end, serialized_options=serialized_options)
+
+    self.values = values
+    for value in self.values:
+      value.type = self
+    self.values_by_name = dict((v.name, v) for v in values)
+    # Values are reversed to ensure that the first alias is retained.
+    self.values_by_number = dict((v.number, v) for v in reversed(values))
+
+  def CopyToProto(self, proto):
+    """Copies this to a descriptor_pb2.EnumDescriptorProto.
+
+    Args:
+      proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
+    """
+    # This function is overridden to give a better doc comment.
+    super(EnumDescriptor, self).CopyToProto(proto)
+
+
+class EnumValueDescriptor(DescriptorBase):
+
+  """Descriptor for a single value within an enum.
+
+  Attributes:
+    name (str): Name of this value.
+    index (int): Dense, 0-indexed index giving the order that this
+      value appears textually within its enum in the .proto file.
+    number (int): Actual number assigned to this enum value.
+    type (EnumDescriptor): :class:`EnumDescriptor` to which this value
+      belongs. Set by :class:`EnumDescriptor`'s constructor if we're
+      passed into one.
+    options (descriptor_pb2.EnumValueOptions): Enum value options message or
+      None to use default enum value options.
+  """
+
+  if _USE_C_DESCRIPTORS:
+    _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
+
+    def __new__(cls, name, index, number,
+                type=None,  # pylint: disable=redefined-builtin
+                options=None, serialized_options=None, create_key=None):
+      _message.Message._CheckCalledFromGeneratedFile()
+      # There is no way we can build a complete EnumValueDescriptor with the
+      # given parameters (the name of the Enum is not known, for example).
+      # Fortunately generated files just pass it to the EnumDescriptor()
+      # constructor, which will ignore it, so returning None is good enough.
+      return None
+
+  def __init__(self, name, index, number,
+               type=None,  # pylint: disable=redefined-builtin
+               options=None, serialized_options=None, create_key=None):
+    """Arguments are as described in the attribute description above."""
+    if create_key is not _internal_create_key:
+      _Deprecated('EnumValueDescriptor')
+
+    super(EnumValueDescriptor, self).__init__(
+        options, serialized_options, 'EnumValueOptions')
+    self.name = name
+    self.index = index
+    self.number = number
+    self.type = type
+
+
+class OneofDescriptor(DescriptorBase):
+  """Descriptor for a oneof field.
+
+  Attributes:
+    name (str): Name of the oneof field.
+    full_name (str): Full name of the oneof field, including package name.
+    index (int): 0-based index giving the order of the oneof field inside
+      its containing type.
+    containing_type (Descriptor): :class:`Descriptor` of the protocol message
+      type that contains this field. Set by the :class:`Descriptor` constructor
+      if we're passed into one.
+    fields (list[FieldDescriptor]): The list of field descriptors this
+      oneof can contain.
+  """
+
+  if _USE_C_DESCRIPTORS:
+    _C_DESCRIPTOR_CLASS = _message.OneofDescriptor
+
+    def __new__(
+        cls, name, full_name, index, containing_type, fields, options=None,
+        serialized_options=None, create_key=None):
+      _message.Message._CheckCalledFromGeneratedFile()
+      return _message.default_pool.FindOneofByName(full_name)
+
+  def __init__(
+      self, name, full_name, index, containing_type, fields, options=None,
+      serialized_options=None, create_key=None):
+    """Arguments are as described in the attribute description above."""
+    if create_key is not _internal_create_key:
+      _Deprecated('OneofDescriptor')
+
+    super(OneofDescriptor, self).__init__(
+        options, serialized_options, 'OneofOptions')
+    self.name = name
+    self.full_name = full_name
+    self.index = index
+    self.containing_type = containing_type
+    self.fields = fields
+
+
+class ServiceDescriptor(_NestedDescriptorBase):
+
+  """Descriptor for a service.
+
+  Attributes:
+    name (str): Name of the service.
+    full_name (str): Full name of the service, including package name.
+    index (int): 0-indexed index giving the order that this service's
+      definition appears within the .proto file.
+    methods (list[MethodDescriptor]): List of methods provided by this
+      service.
+    methods_by_name (dict(str, MethodDescriptor)): Same
+      :class:`MethodDescriptor` objects as in :attr:`methods`, but
+      indexed by "name" attribute in each :class:`MethodDescriptor`.
+    options (descriptor_pb2.ServiceOptions): Service options message or
+      None to use default service options.
+    file (FileDescriptor): Reference to file info.
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor + + def __new__( + cls, + name=None, + full_name=None, + index=None, + methods=None, + options=None, + serialized_options=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindServiceByName(full_name) + + def __init__(self, name, full_name, index, methods, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + if create_key is not _internal_create_key: + _Deprecated('ServiceDescriptor') + + super(ServiceDescriptor, self).__init__( + options, 'ServiceOptions', name, full_name, file, + None, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + self.index = index + self.methods = methods + self.methods_by_name = dict((m.name, m) for m in methods) + # Set the containing service for each method in this service. + for method in self.methods: + method.containing_service = self + + def FindMethodByName(self, name): + """Searches for the specified method, and returns its descriptor. + + Args: + name (str): Name of the method. + Returns: + MethodDescriptor or None: the descriptor for the requested method, if + found. + """ + return self.methods_by_name.get(name, None) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.ServiceDescriptorProto. + + Args: + proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(ServiceDescriptor, self).CopyToProto(proto) + + +class MethodDescriptor(DescriptorBase): + + """Descriptor for a method in a service. + + Attributes: + name (str): Name of the method within the service. + full_name (str): Full name of method. + index (int): 0-indexed index of the method inside the service. + containing_service (ServiceDescriptor): The service that contains this + method. + input_type (Descriptor): The descriptor of the message that this method + accepts. + output_type (Descriptor): The descriptor of the message that this method + returns. + client_streaming (bool): Whether this method uses client streaming. + server_streaming (bool): Whether this method uses server streaming. + options (descriptor_pb2.MethodOptions or None): Method options message, or + None to use default method options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.MethodDescriptor + + def __new__(cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindMethodByName(full_name) + + def __init__(self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + """The arguments are as described in the description of MethodDescriptor + attributes above. + + Note that containing_service may be None, and may be set later if necessary. 
+ """ + if create_key is not _internal_create_key: + _Deprecated('MethodDescriptor') + + super(MethodDescriptor, self).__init__( + options, serialized_options, 'MethodOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_service = containing_service + self.input_type = input_type + self.output_type = output_type + self.client_streaming = client_streaming + self.server_streaming = server_streaming + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.MethodDescriptorProto. + + Args: + proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. + + Raises: + Error: If self couldn't be serialized, due to too few constructor + arguments. + """ + if self.containing_service is not None: + from google.protobuf import descriptor_pb2 + service_proto = descriptor_pb2.ServiceDescriptorProto() + self.containing_service.CopyToProto(service_proto) + proto.CopyFrom(service_proto.method[self.index]) + else: + raise Error('Descriptor does not contain a service.') + + +class FileDescriptor(DescriptorBase): + """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. + + Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and + :attr:`dependencies` fields are only set by the + :py:mod:`google.protobuf.message_factory` module, and not by the generated + proto code. + + Attributes: + name (str): Name of file, relative to root of source tree. + package (str): Name of the package + syntax (str): string indicating syntax of the file (can be "proto2" or + "proto3") + serialized_pb (bytes): Byte string of serialized + :class:`descriptor_pb2.FileDescriptorProto`. + dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` + objects this :class:`FileDescriptor` depends on. + public_dependencies (list[FileDescriptor]): A subset of + :attr:`dependencies`, which were declared as "public". + message_types_by_name (dict(str, Descriptor)): Mapping from message names + to their :class:`Descriptor`. + enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to + their :class:`EnumDescriptor`. + extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension + names declared at file scope to their :class:`FieldDescriptor`. + services_by_name (dict(str, ServiceDescriptor)): Mapping from services' + names to their :class:`ServiceDescriptor`. + pool (DescriptorPool): The pool this descriptor belongs to. When not + passed to the constructor, the global default pool is used. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FileDescriptor + + def __new__(cls, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + # FileDescriptor() is called from various places, not only from generated + # files, to register dynamic proto files and messages. 
+ # pylint: disable=g-explicit-bool-comparison + if serialized_pb == b'': + # Cpp generated code must be linked in if serialized_pb is '' + try: + return _message.default_pool.FindFileByName(name) + except KeyError: + raise RuntimeError('Please link in cpp generated lib for %s' % (name)) + elif serialized_pb: + return _message.default_pool.AddSerializedFile(serialized_pb) + else: + return super(FileDescriptor, cls).__new__(cls) + + def __init__(self, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + """Constructor.""" + if create_key is not _internal_create_key: + _Deprecated('FileDescriptor') + + super(FileDescriptor, self).__init__( + options, serialized_options, 'FileOptions') + + if pool is None: + from google.protobuf import descriptor_pool + pool = descriptor_pool.Default() + self.pool = pool + self.message_types_by_name = {} + self.name = name + self.package = package + self.syntax = syntax or "proto2" + self.serialized_pb = serialized_pb + + self.enum_types_by_name = {} + self.extensions_by_name = {} + self.services_by_name = {} + self.dependencies = (dependencies or []) + self.public_dependencies = (public_dependencies or []) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.FileDescriptorProto. + + Args: + proto: An empty descriptor_pb2.FileDescriptorProto. + """ + proto.ParseFromString(self.serialized_pb) + + +def _ParseOptions(message, string): + """Parses serialized options. + + This helper function is used to parse serialized options in generated + proto2 files. It must not be used outside proto2. + """ + message.ParseFromString(string) + return message + + +def _ToCamelCase(name): + """Converts name to camel-case and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + if result: + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + # Lower-case the first letter. + if result and result[0].isupper(): + result[0] = result[0].lower() + return ''.join(result) + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _ToJsonName(name): + """Converts name to Json name and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + return ''.join(result) + + +def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, + syntax=None): + """Make a protobuf Descriptor given a DescriptorProto protobuf. + + Handles nested descriptors. Note that this is limited to the scope of defining + a message inside of another message. Composite fields can currently only be + resolved if the message is defined in the same scope as the field. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: Optional package name for the new message Descriptor (string). + build_file_if_cpp: Update the C++ descriptor pool if api matches. + Set to False on recursion, so no duplicates are created. + syntax: The syntax/semantics that should be used. Set to "proto3" to get + proto3 field presence semantics. + Returns: + A Descriptor for protobuf messages. 
+ """ + if api_implementation.Type() == 'cpp' and build_file_if_cpp: + # The C++ implementation requires all descriptors to be backed by the same + # definition in the C++ descriptor pool. To do this, we build a + # FileDescriptorProto with the same definition as this descriptor and build + # it into the pool. + from google.protobuf import descriptor_pb2 + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.message_type.add().MergeFrom(desc_proto) + + # Generate a random name for this proto file to prevent conflicts with any + # imported ones. We need to specify a file name so the descriptor pool + # accepts our FileDescriptorProto, but it is not important what that file + # name is actually set to. + proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') + + if package: + file_descriptor_proto.name = os.path.join(package.replace('.', '/'), + proto_name + '.proto') + file_descriptor_proto.package = package + else: + file_descriptor_proto.name = proto_name + '.proto' + + _message.default_pool.Add(file_descriptor_proto) + result = _message.default_pool.FindFileByName(file_descriptor_proto.name) + + if _USE_C_DESCRIPTORS: + return result.message_types_by_name[desc_proto.name] + + full_message_name = [desc_proto.name] + if package: full_message_name.insert(0, package) + + # Create Descriptors for enum types + enum_types = {} + for enum_proto in desc_proto.enum_type: + full_name = '.'.join(full_message_name + [enum_proto.name]) + enum_desc = EnumDescriptor( + enum_proto.name, full_name, None, [ + EnumValueDescriptor(enum_val.name, ii, enum_val.number, + create_key=_internal_create_key) + for ii, enum_val in enumerate(enum_proto.value)], + create_key=_internal_create_key) + enum_types[full_name] = enum_desc + + # Create Descriptors for nested types + nested_types = {} + for nested_proto in desc_proto.nested_type: + full_name = '.'.join(full_message_name + [nested_proto.name]) + # Nested types are just those defined inside of the message, not all types + # used by fields in the message, so no loops are possible here. 
+ nested_desc = MakeDescriptor(nested_proto, + package='.'.join(full_message_name), + build_file_if_cpp=False, + syntax=syntax) + nested_types[full_name] = nested_desc + + fields = [] + for field_proto in desc_proto.field: + full_name = '.'.join(full_message_name + [field_proto.name]) + enum_desc = None + nested_desc = None + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + if field_proto.HasField('type_name'): + type_name = field_proto.type_name + full_type_name = '.'.join(full_message_name + + [type_name[type_name.rfind('.')+1:]]) + if full_type_name in nested_types: + nested_desc = nested_types[full_type_name] + elif full_type_name in enum_types: + enum_desc = enum_types[full_type_name] + # Else type_name references a non-local type, which isn't implemented + field = FieldDescriptor( + field_proto.name, full_name, field_proto.number - 1, + field_proto.number, field_proto.type, + FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), + field_proto.label, None, nested_desc, enum_desc, None, False, None, + options=_OptionsOrNone(field_proto), has_default_value=False, + json_name=json_name, create_key=_internal_create_key) + fields.append(field) + + desc_name = '.'.join(full_message_name) + return Descriptor(desc_proto.name, desc_name, None, None, fields, + list(nested_types.values()), list(enum_types.values()), [], + options=_OptionsOrNone(desc_proto), + create_key=_internal_create_key) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py new file mode 100644 index 0000000000..073eddc711 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py @@ -0,0 +1,177 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
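The vendored descriptor.py above closes with MakeDescriptor(), which turns a plain DescriptorProto into a live Descriptor. A minimal sketch of exercising it, assuming the vendored package resolves as google.protobuf (which its internal imports expect); the Pet/Kind/nick_name names are hypothetical, invented for illustration:

```python
from google.protobuf import descriptor_pb2
from google.protobuf.descriptor import FieldDescriptor, MakeDescriptor

# Hand-build a DescriptorProto: message Pet { optional string nick_name = 1; }
proto = descriptor_pb2.DescriptorProto()
proto.name = 'Pet'
field = proto.field.add()
field.name = 'nick_name'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

# ...plus a nested enum so EnumValueName() has something to resolve.
enum = proto.enum_type.add()
enum.name = 'Kind'
value = enum.value.add()
value.name = 'KIND_CAT'
value.number = 0

desc = MakeDescriptor(proto, package='example')
assert desc.full_name == 'example.Pet'

# Field lookups mirror the Descriptor attributes documented above.
f = desc.fields_by_name['nick_name']
assert f.cpp_type == FieldDescriptor.CPPTYPE_STRING
assert f.camelcase_name == 'nickName'   # via _ToCamelCase()
assert f.json_name == 'nickName'        # via _ToJsonName()

# EnumValueName() maps (enum name, number) -> value name.
assert desc.EnumValueName('Kind', 0) == 'KIND_CAT'
```

Under the C++ implementation the same call routes through _message.default_pool with a randomly named synthetic .proto file, as the os.urandom() branch at the top of MakeDescriptor() shows.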
+ +"""Provides a container for DescriptorProtos.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import warnings + + +class Error(Exception): + pass + + +class DescriptorDatabaseConflictingDefinitionError(Error): + """Raised when a proto is added with the same name & different descriptor.""" + + +class DescriptorDatabase(object): + """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" + + def __init__(self): + self._file_desc_protos_by_file = {} + self._file_desc_protos_by_symbol = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this database. + + Args: + file_desc_proto: The FileDescriptorProto to add. + Raises: + DescriptorDatabaseConflictingDefinitionError: if an attempt is made to + add a proto with the same name but different definition than an + existing proto in the database. + """ + proto_name = file_desc_proto.name + if proto_name not in self._file_desc_protos_by_file: + self._file_desc_protos_by_file[proto_name] = file_desc_proto + elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: + raise DescriptorDatabaseConflictingDefinitionError( + '%s already added, but with different descriptor.' % proto_name) + else: + return + + # Add all the top-level descriptors to the index. + package = file_desc_proto.package + for message in file_desc_proto.message_type: + for name in _ExtractSymbols(message, package): + self._AddSymbol(name, file_desc_proto) + for enum in file_desc_proto.enum_type: + self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) + for enum_value in enum.value: + self._file_desc_protos_by_symbol[ + '.'.join((package, enum_value.name))] = file_desc_proto + for extension in file_desc_proto.extension: + self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) + for service in file_desc_proto.service: + self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) + + def FindFileByName(self, name): + """Finds the file descriptor proto by file name. + + Typically the file name is a relative path ending to a .proto file. The + proto with the given name will have to have been added to this database + using the Add method or else an error will be raised. + + Args: + name: The file name to find. + + Returns: + The file descriptor proto matching the name. + + Raises: + KeyError if no file by the given name was added. + """ + + return self._file_desc_protos_by_file[name] + + def FindFileContainingSymbol(self, symbol): + """Finds the file descriptor proto containing the specified symbol. + + The symbol should be a fully qualified name including the file descriptor's + package and any containing messages. Some examples: + + 'some.package.name.Message' + 'some.package.name.Message.NestedEnum' + 'some.package.name.Message.some_field' + + The file descriptor proto containing the specified symbol must be added to + this database using the Add method or else an error will be raised. + + Args: + symbol: The fully qualified symbol name. + + Returns: + The file descriptor proto containing the symbol. + + Raises: + KeyError if no file contains the specified symbol. + """ + try: + return self._file_desc_protos_by_symbol[symbol] + except KeyError: + # Fields, enum values, and nested extensions are not in + # _file_desc_protos_by_symbol. Try to find the top level + # descriptor. Non-existent nested symbol under a valid top level + # descriptor can also be found. The behavior is the same with + # protobuf C++. 
+      top_level, _, _ = symbol.rpartition('.')
+      try:
+        return self._file_desc_protos_by_symbol[top_level]
+      except KeyError:
+        # Raise the original symbol as a KeyError for better diagnostics.
+        raise KeyError(symbol)
+
+  def FindFileContainingExtension(self, extendee_name, extension_number):
+    # TODO(jieluo): implement this API.
+    return None
+
+  def FindAllExtensionNumbers(self, extendee_name):
+    # TODO(jieluo): implement this API.
+    return []
+
+  def _AddSymbol(self, name, file_desc_proto):
+    if name in self._file_desc_protos_by_symbol:
+      warn_msg = ('Conflicting symbol registration for file "' +
+                  file_desc_proto.name + '": ' + name +
+                  ' is already defined in file "' +
+                  self._file_desc_protos_by_symbol[name].name + '"')
+      warnings.warn(warn_msg, RuntimeWarning)
+    self._file_desc_protos_by_symbol[name] = file_desc_proto
+
+
+def _ExtractSymbols(desc_proto, package):
+  """Pulls out all the symbols from a descriptor proto.
+
+  Args:
+    desc_proto: The proto to extract symbols from.
+    package: The package containing the descriptor type.
+
+  Yields:
+    The fully qualified name found in the descriptor.
+  """
+  message_name = package + '.' + desc_proto.name if package else desc_proto.name
+  yield message_name
+  for nested_type in desc_proto.nested_type:
+    for symbol in _ExtractSymbols(nested_type, message_name):
+      yield symbol
+  for enum_type in desc_proto.enum_type:
+    yield '.'.join((message_name, enum_type.name))
diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py
new file mode 100644
index 0000000000..f570386432
--- /dev/null
+++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py
@@ -0,0 +1,1925 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
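Before the generated descriptor_pb2.py listing gets underway, a short usage sketch for the descriptor_database module above. The file, package, and message names are made up, and the vendored package is again assumed to import as google.protobuf:

```python
from google.protobuf import descriptor_pb2
from google.protobuf.descriptor_database import DescriptorDatabase

db = DescriptorDatabase()

# Register a FileDescriptorProto describing a hypothetical example/pets.proto.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/pets.proto'
file_proto.package = 'example'
file_proto.message_type.add().name = 'Pet'
db.Add(file_proto)

# Look the file up by name, or by any symbol it defines.
assert db.FindFileByName('example/pets.proto') is file_proto
assert db.FindFileContainingSymbol('example.Pet') is file_proto

# Unknown nested symbols fall back to their top-level container,
# matching the C++ behavior noted in FindFileContainingSymbol().
assert db.FindFileContainingSymbol('example.Pet.nick_name') is file_proto
```

The last lookup succeeds because FindFileContainingSymbol() retries with the symbol's container obtained via rpartition('.').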
+# source: google/protobuf/descriptor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' + ) +else: + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') + +if _descriptor._USE_C_DESCRIPTORS == False: + _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=3, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
_descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + + _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + + _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + + _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + 
name='STRING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + + _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( + name='IdempotencyLevel', + full_name='google.protobuf.MethodOptions.IdempotencyLevel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='IDEMPOTENCY_UNKNOWN', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NO_SIDE_EFFECTS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='IDEMPOTENT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) + + + _FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
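# --- Example (illustration only, not part of the generated file): the
# EnumDescriptor blocks registered above (Type, Label, OptimizeMode, CType,
# JSType, IdempotencyLevel) surface at runtime as EnumTypeWrapper attributes
# on the corresponding message classes. A minimal sketch, assuming this
# module is importable as google.protobuf.descriptor_pb2; the numbers below
# come straight from the EnumValueDescriptor entries in this patch. ---
from google.protobuf import descriptor_pb2

# FieldDescriptorProto.Type wraps _FIELDDESCRIPTORPROTO_TYPE: value 10 is TYPE_GROUP.
assert descriptor_pb2.FieldDescriptorProto.Type.Name(10) == 'TYPE_GROUP'
assert descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_SINT64') == 18

# FieldDescriptorProto.Label wraps _FIELDDESCRIPTORPROTO_LABEL.
assert descriptor_pb2.FieldDescriptorProto.Label.Name(3) == 'LABEL_REPEATED'

# FileOptions.OptimizeMode wraps _FILEOPTIONS_OPTIMIZEMODE.
assert descriptor_pb2.FileOptions.OptimizeMode.Value('LITE_RUNTIME') == 3
# --- end example ---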
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
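# --- Example (illustration only): _FILEDESCRIPTORPROTO above describes
# FileDescriptorProto, the self-describing schema message. A minimal sketch
# of building one by hand and loading it into a fresh DescriptorPool; the
# file/message names ('example/demo.proto', 'Demo') are hypothetical, and
# MessageFactory(...).GetPrototype is the protobuf-3.x-era factory API
# (newer releases expose message_factory.GetMessageClass instead). ---
from google.protobuf import descriptor_pb2, descriptor_pool, message_factory

fdp = descriptor_pb2.FileDescriptorProto()
fdp.name = 'example/demo.proto'
fdp.package = 'example'
fdp.syntax = 'proto3'

msg = fdp.message_type.add()          # DescriptorProto (field 4 above)
msg.name = 'Demo'
field = msg.field.add()               # FieldDescriptorProto
field.name = 'id'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT64
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

pool = descriptor_pool.DescriptorPool()
pool.Add(fdp)                         # registers the file in the pool
demo_desc = pool.FindMessageTypeByName('example.Demo')
Demo = message_factory.MessageFactory(pool).GetPrototype(demo_desc)
print(Demo(id=7))                     # prints: id: 7
# --- end example ---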
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( + name='ExtensionRangeOptions', + full_name='google.protobuf.ExtensionRangeOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', 
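# --- Example (illustration only): _DESCRIPTORPROTO and its nested
# ExtensionRange/ReservedRange types, defined above, carry a message's
# extension and reserved number ranges. A minimal sketch; the message name
# 'Extendable' is hypothetical. Note that range ends are exclusive, which
# is why the extendable options messages declare (1000, 536870912): field
# numbers 1000 through 536870911 (= 2**29 - 1, the protobuf maximum). ---
from google.protobuf import descriptor_pb2

msg = descriptor_pb2.DescriptorProto(name='Extendable')

# DescriptorProto.ExtensionRange: 'end' is exclusive, so this covers 100..199.
ext = msg.extension_range.add()
ext.start = 100
ext.end = 200

# DescriptorProto.ReservedRange works the same way; 1000..1999 reserved here.
res = msg.reserved_range.add()
res.start = 1000
res.end = 2000
msg.reserved_name.append('legacy_field')

assert 536870912 == 2 ** 29
# --- end example ---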
full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( + name='EnumReservedRange', + full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', 
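# --- Example (illustration only): _FIELDDESCRIPTORPROTO above includes the
# proto3_optional flag (field 17) and oneof_index (field 9). A minimal
# sketch of how a proto3 'optional' field is represented: it sets
# proto3_optional=True and points at a synthetic single-field oneof.
# The names 'Profile' and 'nickname' are hypothetical. ---
from google.protobuf import descriptor_pb2

msg = descriptor_pb2.DescriptorProto(name='Profile')
oneof = msg.oneof_decl.add()          # OneofDescriptorProto
oneof.name = '_nickname'              # synthetic oneof, conventionally '_<field>'

field = msg.field.add()
field.name = 'nickname'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
field.proto3_optional = True
field.oneof_index = 0                 # index into msg.oneof_decl

assert msg.oneof_decl[field.oneof_index].name == '_nickname'
# --- end example ---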
full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + 
full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + 
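# --- Example (illustration only): _SERVICEDESCRIPTORPROTO and
# _METHODDESCRIPTORPROTO above describe RPC services, including the
# client_streaming/server_streaming flags (both defaulting to False per
# has_default_value=True, default_value=False). A minimal sketch; the
# 'Chat' service and '.example.*' type names are hypothetical. ---
from google.protobuf import descriptor_pb2

svc = descriptor_pb2.ServiceDescriptorProto(name='Chat')
m = svc.method.add()                      # MethodDescriptorProto
m.name = 'Stream'
m.input_type = '.example.ChatMessage'     # fully qualified, leading dot
m.output_type = '.example.ChatMessage'
m.client_streaming = True                 # bidirectional streaming RPC
m.server_streaming = True

# A plain unary method only needs name/input_type/output_type.
unary = svc.method.add(name='Send', input_type='.example.ChatMessage',
                       output_type='.example.Ack')
assert not unary.client_streaming and not unary.server_streaming
# --- end example ---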
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, + number=42, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
+ number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, + number=39, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, + number=40, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, + number=41, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, + number=44, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, + number=45, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, 
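# --- Example (illustration only): _FILEOPTIONS above defines the per-file
# code-generation knobs. A minimal sketch of setting a few of them on a
# FileDescriptorProto; the file name and package values are hypothetical. ---
from google.protobuf import descriptor_pb2

fdp = descriptor_pb2.FileDescriptorProto(name='example/demo.proto')
opts = fdp.options                        # FileOptions submessage
opts.java_package = 'com.example.demo'
opts.java_multiple_files = True
opts.optimize_for = descriptor_pb2.FileOptions.CODE_SIZE
opts.go_package = 'example.com/demo'

# optimize_for declares a default of SPEED (default_value=1 above).
assert descriptor_pb2.FileOptions().optimize_for == descriptor_pb2.FileOptions.SPEED
# --- end example ---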
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, + number=15, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ONEOFOPTIONS = _descriptor.Descriptor( + name='OneofOptions', + full_name='google.protobuf.OneofOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
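# --- Example (illustration only): _FIELDOPTIONS above carries per-field
# settings such as packed, deprecated, and jstype. A minimal sketch of
# attaching them to a FieldDescriptorProto; the 'ids' field is hypothetical. ---
from google.protobuf import descriptor_pb2

field = descriptor_pb2.FieldDescriptorProto(
    name='ids', number=4,
    type=descriptor_pb2.FieldDescriptorProto.TYPE_UINT64,
    label=descriptor_pb2.FieldDescriptorProto.LABEL_REPEATED)

field.options.packed = True               # wire-format packing for repeated scalars
field.options.deprecated = True
field.options.jstype = descriptor_pb2.FieldOptions.JS_STRING  # 64-bit safe in JS

# ctype and jstype declare defaults (STRING / JS_NORMAL, both 0), matching
# the has_default_value=True, default_value=0 entries above.
assert descriptor_pb2.FieldOptions().jstype == descriptor_pb2.FieldOptions.JS_NORMAL
# --- end example ---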
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + 
full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, + number=34, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _METHODOPTIONS_IDEMPOTENCYLEVEL, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='google.protobuf.GeneratedCodeInfo.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _GENERATEDCODEINFO = _descriptor.Descriptor( + name='GeneratedCodeInfo', + full_name='google.protobuf.GeneratedCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS + _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO + _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS + _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE + _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS + _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE + _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL + _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE + _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS + _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO + _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO + _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS + _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE + _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS + _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO + _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS + _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS + _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE + _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS + _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE + _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE + _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS + _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS + _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL + _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS + _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION + _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART + _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO + _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION + _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO + _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION + DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET + DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO + 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS + DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS + DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS + DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS + DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS + DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS + DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS + DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION + DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + +else: + _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _FILEDESCRIPTORSET._serialized_start=53 + _FILEDESCRIPTORSET._serialized_end=124 + _FILEDESCRIPTORPROTO._serialized_start=127 + _FILEDESCRIPTORPROTO._serialized_end=602 + _DESCRIPTORPROTO._serialized_start=605 + _DESCRIPTORPROTO._serialized_end=1286 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 + _EXTENSIONRANGEOPTIONS._serialized_start=1288 + _EXTENSIONRANGEOPTIONS._serialized_end=1391 + _FIELDDESCRIPTORPROTO._serialized_start=1394 + _FIELDDESCRIPTORPROTO._serialized_end=2119 + _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 + _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 + _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 + _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 + _ONEOFDESCRIPTORPROTO._serialized_start=2121 + _ONEOFDESCRIPTORPROTO._serialized_end=2205 + _ENUMDESCRIPTORPROTO._serialized_start=2208 + _ENUMDESCRIPTORPROTO._serialized_end=2500 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 + _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 + _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 + _SERVICEDESCRIPTORPROTO._serialized_start=2613 + _SERVICEDESCRIPTORPROTO._serialized_end=2757 + _METHODDESCRIPTORPROTO._serialized_start=2760 + _METHODDESCRIPTORPROTO._serialized_end=2953 + _FILEOPTIONS._serialized_start=2956 + _FILEOPTIONS._serialized_end=3761 + _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 + _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 + _MESSAGEOPTIONS._serialized_start=3764 + _MESSAGEOPTIONS._serialized_end=4024 + _FIELDOPTIONS._serialized_start=4027 + _FIELDOPTIONS._serialized_end=4473 + _FIELDOPTIONS_CTYPE._serialized_start=4354 + _FIELDOPTIONS_CTYPE._serialized_end=4401 + _FIELDOPTIONS_JSTYPE._serialized_start=4403 + _FIELDOPTIONS_JSTYPE._serialized_end=4456 + _ONEOFOPTIONS._serialized_start=4475 + _ONEOFOPTIONS._serialized_end=4569 + _ENUMOPTIONS._serialized_start=4572 + _ENUMOPTIONS._serialized_end=4719 + _ENUMVALUEOPTIONS._serialized_start=4721 + _ENUMVALUEOPTIONS._serialized_end=4846 + _SERVICEOPTIONS._serialized_start=4848 + _SERVICEOPTIONS._serialized_end=4971 + 
_METHODOPTIONS._serialized_start=4974 + _METHODOPTIONS._serialized_end=5275 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 + _UNINTERPRETEDOPTION._serialized_start=5278 + _UNINTERPRETEDOPTION._serialized_end=5564 + _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 + _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 + _SOURCECODEINFO._serialized_start=5567 + _SOURCECODEINFO._serialized_end=5780 + _SOURCECODEINFO_LOCATION._serialized_start=5646 + _SOURCECODEINFO_LOCATION._serialized_end=5780 + _GENERATEDCODEINFO._serialized_start=5783 + _GENERATEDCODEINFO._serialized_end=5950 + _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 + _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py new file mode 100644 index 0000000000..911372a8b0 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py @@ -0,0 +1,1295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example of how to use this class:: + + pool = DescriptorPool() + file_descriptor_protos = [ ... 
] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import collections +import warnings + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS  # pylint: disable=protected-access + + +def _Deprecated(func): + """Mark functions as deprecated.""" + + def NewFunc(*args, **kwargs): + warnings.warn( + 'Call to deprecated function %s(). Note: Adding unlinked descriptors ' + 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' + 'instead.' % func.__name__, + category=DeprecationWarning) + return func(*args, **kwargs) + NewFunc.__name__ = func.__name__ + NewFunc.__doc__ = func.__doc__ + NewFunc.__dict__.update(func.__dict__) + return NewFunc + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name (str): The fully-qualified symbol name. + + Returns: + str: The normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of protocol buffers. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._service_descriptors = {} + self._file_descriptors = {} + self._toplevel_extensions = {} + # TODO(jieluo): Remove _file_desc_by_toplevel_extension after + # maybe year 2020 for compatibility issue (with 3.4.1 only). 
+ self._file_desc_by_toplevel_extension = {} + self._top_enum_values = {} + # We store extensions in two two-level mappings: The first key is the + # descriptor of the message being extended, the second key is the extension + # full name or its tag number. + self._extensions_by_name = collections.defaultdict(dict) + self._extensions_by_number = collections.defaultdict(dict) + + def _CheckConflictRegister(self, desc, desc_name, file_name): + """Check if the descriptor name conflicts with another of the same name. + + Args: + desc: Descriptor of a message, enum, service, extension or enum value. + desc_name (str): the full name of desc. + file_name (str): The file name of the descriptor. + """ + for register, descriptor_type in [ + (self._descriptors, descriptor.Descriptor), + (self._enum_descriptors, descriptor.EnumDescriptor), + (self._service_descriptors, descriptor.ServiceDescriptor), + (self._toplevel_extensions, descriptor.FieldDescriptor), + (self._top_enum_values, descriptor.EnumValueDescriptor)]: + if desc_name in register: + old_desc = register[desc_name] + if isinstance(old_desc, descriptor.EnumValueDescriptor): + old_file = old_desc.type.file.name + else: + old_file = old_desc.file.name + + if not isinstance(desc, descriptor_type) or ( + old_file != file_name): + error_msg = ('Conflict register for file "' + file_name + + '": ' + desc_name + + ' is already defined in file "' + + old_file + '". Please fix the conflict by adding a ' + 'package name to the proto file, or use a different ' + 'name for the duplication.') + if isinstance(desc, descriptor.EnumValueDescriptor): + error_msg += ('\nNote: enum values appear as ' + 'siblings of the enum type instead of ' + 'children of it.') + + raise TypeError(error_msg) + + return + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto (FileDescriptorProto): The file descriptor to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto (bytes): A bytes string, serialization of the + :class:`FileDescriptorProto` to add. + + Returns: + FileDescriptor: Descriptor for the added file. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) + file_desc.serialized_pb = serialized_file_desc_proto + return file_desc + + # Adding a Descriptor to the descriptor pool is deprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddDescriptor(self, desc): + self._AddDescriptor(desc) + + # Never call this method. It is for internal usage only. + def _AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + + self._descriptors[desc.full_name] = desc + self._AddFileDescriptor(desc.file) + + # Adding an EnumDescriptor to the descriptor pool is deprecated. 
Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddEnumDescriptor(self, enum_desc): + self._AddEnumDescriptor(enum_desc) + + # Never call this method. It is for internal usage only. + def _AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the enum. + + Args: + enum_desc: An EnumDescriptor. + """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + file_name = enum_desc.file.name + self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) + self._enum_descriptors[enum_desc.full_name] = enum_desc + + # Top enum values need to be indexed. + # Count the number of dots to see whether the enum is toplevel or nested + # in a message. We cannot use enum_desc.containing_type at this stage. + if enum_desc.file.package: + top_level = (enum_desc.full_name.count('.') + - enum_desc.file.package.count('.') == 1) + else: + top_level = enum_desc.full_name.count('.') == 0 + if top_level: + file_name = enum_desc.file.name + package = enum_desc.file.package + for enum_value in enum_desc.values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, enum_value.name))) + self._CheckConflictRegister(enum_value, full_name, file_name) + self._top_enum_values[full_name] = enum_value + self._AddFileDescriptor(enum_desc.file) + + # Adding a ServiceDescriptor to the descriptor pool is deprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddServiceDescriptor(self, service_desc): + self._AddServiceDescriptor(service_desc) + + # Never call this method. It is for internal usage only. + def _AddServiceDescriptor(self, service_desc): + """Adds a ServiceDescriptor to the pool. + + Args: + service_desc: A ServiceDescriptor. + """ + + if not isinstance(service_desc, descriptor.ServiceDescriptor): + raise TypeError('Expected instance of descriptor.ServiceDescriptor.') + + self._CheckConflictRegister(service_desc, service_desc.full_name, + service_desc.file.name) + self._service_descriptors[service_desc.full_name] = service_desc + + # Adding an ExtensionDescriptor to the descriptor pool is deprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddExtensionDescriptor(self, extension): + self._AddExtensionDescriptor(extension) + + # Never call this method. It is for internal usage only. + def _AddExtensionDescriptor(self, extension): + """Adds a FieldDescriptor describing an extension to the pool. + + Args: + extension: A FieldDescriptor. + + Raises: + AssertionError: when another extension with the same number extends the + same message. + TypeError: when the specified extension is not a + descriptor.FieldDescriptor. + """ + if not (isinstance(extension, descriptor.FieldDescriptor) and + extension.is_extension): + raise TypeError('Expected an extension descriptor.') + + if extension.extension_scope is None: + self._toplevel_extensions[extension.full_name] = extension + + try: + existing_desc = self._extensions_by_number[ + extension.containing_type][extension.number] + except KeyError: + pass + else: + if extension is not existing_desc: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" ' + 'with field number %d.' 
% + (extension.full_name, existing_desc.full_name, + extension.containing_type.full_name, extension.number)) + + self._extensions_by_number[extension.containing_type][ + extension.number] = extension + self._extensions_by_name[extension.containing_type][ + extension.full_name] = extension + + # Also register MessageSet extensions with the type name. + if _IsMessageSetExtension(extension): + self._extensions_by_name[extension.containing_type][ + extension.message_type.full_name] = extension + + @_Deprecated + def AddFileDescriptor(self, file_desc): + self._InternalAddFileDescriptor(file_desc) + + # Never call this method. It is for internal usage only. + def _InternalAddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + self._AddFileDescriptor(file_desc) + # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. + # FieldDescriptor.file is added in code gen. Remove this solution after + # maybe 2020 for compatibility reason (with 3.4.1 only). + for extension in file_desc.extensions_by_name.values(): + self._file_desc_by_toplevel_extension[ + extension.full_name] = file_desc + + def _AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name (str): The path to the file to get a descriptor for. + + Returns: + FileDescriptor: The descriptor for the named file. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + pass + + try: + # Try fallback database. Build and find again if possible. + self._FindFileContainingSymbolInDb(symbol) + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + raise KeyError('Cannot find a file containing %s' % symbol) + + def _InternalFindFileContainingSymbol(self, symbol): + """Gets the already built FileDescriptor containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. 
+ """ + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + return self._service_descriptors[symbol].file + except KeyError: + pass + + try: + return self._top_enum_values[symbol].type.file + except KeyError: + pass + + try: + return self._file_desc_by_toplevel_extension[symbol] + except KeyError: + pass + + # Try fields, enum values and nested extensions inside a message. + top_name, _, sub_name = symbol.rpartition('.') + try: + message = self.FindMessageTypeByName(top_name) + assert (sub_name in message.extensions_by_name or + sub_name in message.fields_by_name or + sub_name in message.enum_values_by_name) + return message.file + except (KeyError, AssertionError): + raise KeyError('Cannot find a file containing %s' % symbol) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name (str): The full name of the descriptor to load. + + Returns: + Descriptor: The descriptor for the named type. + + Raises: + KeyError: if the message cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name (str): The full name of the enum descriptor to load. + + Returns: + EnumDescriptor: The enum descriptor for the named type. + + Raises: + KeyError: if the enum cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name (str): The full name of the field descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named field. + + Raises: + KeyError: if the field cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindOneofByName(self, full_name): + """Loads the named oneof descriptor from the pool. + + Args: + full_name (str): The full name of the oneof descriptor to load. + + Returns: + OneofDescriptor: The oneof descriptor for the named oneof. + + Raises: + KeyError: if the oneof cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, oneof_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.oneofs_by_name[oneof_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. + + Args: + full_name (str): The full name of the extension descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named extension. + + Raises: + KeyError: if the extension cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + try: + # The proto compiler does not give any link between the FileDescriptor + # and top-level extensions unless the FileDescriptorProto is added to + # the DescriptorDatabase, but this can impact memory usage. 
+ # So we register these extensions by name explicitly. + return self._toplevel_extensions[full_name] + except KeyError: + pass + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self._FindFileContainingSymbolInDb(full_name) + return scope.extensions_by_name[extension_name] + + def FindExtensionByNumber(self, message_descriptor, number): + """Gets the extension of the specified message with the specified number. + + Extensions have to be registered to this pool by calling :func:`Add` or + :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): descriptor of the extended message. + number (int): Number of the extension field. + + Returns: + FieldDescriptor: The descriptor for the extension. + + Raises: + KeyError: when no extension with the given number is known for the + specified message. + """ + try: + return self._extensions_by_number[message_descriptor][number] + except KeyError: + self._TryLoadExtensionFromDB(message_descriptor, number) + return self._extensions_by_number[message_descriptor][number] + + def FindAllExtensions(self, message_descriptor): + """Gets all the known extensions of a given message. + + Extensions have to be registered to this pool by calling + :func:`Add` or :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): Descriptor of the extended message. + + Returns: + list[FieldDescriptor]: Field descriptors describing the extensions. + """ + # Fall back to the descriptor db if FindAllExtensionNumbers is provided. + if self._descriptor_db and hasattr( + self._descriptor_db, 'FindAllExtensionNumbers'): + full_name = message_descriptor.full_name + all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) + for number in all_numbers: + if number in self._extensions_by_number[message_descriptor]: + continue + self._TryLoadExtensionFromDB(message_descriptor, number) + + return list(self._extensions_by_number[message_descriptor].values()) + + def _TryLoadExtensionFromDB(self, message_descriptor, number): + """Try to load extensions from the descriptor db. + + Args: + message_descriptor: descriptor of the extended message. + number: the extension number that needs to be loaded. + """ + if not self._descriptor_db: + return + # Only supported when FindFileContainingExtension is provided. + if not hasattr( + self._descriptor_db, 'FindFileContainingExtension'): + return + + full_name = message_descriptor.full_name + file_proto = self._descriptor_db.FindFileContainingExtension( + full_name, number) + + if file_proto is None: + return + + try: + self._ConvertFileProtoToFileDescriptor(file_proto) + except: + warn_msg = ('Unable to load proto file %s for extension number %d.' % + (file_proto.name, number)) + warnings.warn(warn_msg, RuntimeWarning) + + def FindServiceByName(self, full_name): + """Loads the named service descriptor from the pool. + + Args: + full_name (str): The full name of the service descriptor to load. + + Returns: + ServiceDescriptor: The service descriptor for the named service. + + Raises: + KeyError: if the service cannot be found in the pool. 
+ """ + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._service_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._service_descriptors[full_name] + + def FindMethodByName(self, full_name): + """Loads the named service method descriptor from the pool. + + Args: + full_name (str): The full name of the method descriptor to load. + + Returns: + MethodDescriptor: The method descriptor for the service method. + + Raises: + KeyError: if the method cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + service_name, _, method_name = full_name.rpartition('.') + service_descriptor = self.FindServiceByName(service_name) + return service_descriptor.methods_by_name[method_name] + + def _FindFileContainingSymbolInDb(self, symbol): + """Finds the file in descriptor DB containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: The file that contains the specified symbol. + + Raises: + KeyError: if the file cannot be found in the descriptor database. + """ + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + public_deps = [direct_deps[i] for i in file_proto.public_dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=_OptionsOrNone(file_proto), + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps, + public_dependencies=public_deps, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. 
+ for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope, True)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, file_descriptor, + is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( + file_descriptor) + + for desc_proto in file_proto.message_type: + self._SetAllFieldTypes(file_proto.package, desc_proto, scope) + + if file_proto.package: + desc_proto_prefix = _PrefixWithDot(file_proto.package) + else: + desc_proto_prefix = '' + + for desc_proto in file_proto.message_type: + desc = self._GetTypeFromScope( + desc_proto_prefix, desc_proto.name, scope) + file_descriptor.message_types_by_name[desc_proto.name] = desc + + for index, service_proto in enumerate(file_proto.service): + file_descriptor.services_by_name[service_proto.name] = ( + self._MakeServiceDescriptor(service_proto, index, scope, + file_proto.package, file_descriptor)) + + self._file_descriptors[file_proto.name] = file_descriptor + + # Add extensions to the pool + file_desc = self._file_descriptors[file_proto.name] + for extension in file_desc.extensions_by_name.values(): + self._AddExtensionDescriptor(extension) + for message_type in file_desc.message_types_by_name.values(): + for extension in message_type.extensions: + self._AddExtensionDescriptor(extension) + + return file_desc + + def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, + scope=None, syntax=None): + """Adds the proto to the pool in the specified package. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: The package the proto should be located in. + file_desc: The file containing this message. + scope: Dict mapping short and full symbols to message and enum types. + syntax: string indicating syntax of the file ("proto2" or "proto3") + + Returns: + The added descriptor. 
+ """ + + if package: + desc_name = '.'.join((package, desc_proto.name)) + else: + desc_name = desc_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + if scope is None: + scope = {} + + nested = [ + self._ConvertMessageDescriptor( + nested, desc_name, file_desc, scope, syntax) + for nested in desc_proto.nested_type] + enums = [ + self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, + scope, False) + for enum in desc_proto.enum_type] + fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) + for index, field in enumerate(desc_proto.field)] + extensions = [ + self._MakeFieldDescriptor(extension, desc_name, index, file_desc, + is_extension=True) + for index, extension in enumerate(desc_proto.extension)] + oneofs = [ + # pylint: disable=g-complex-comprehension + descriptor.OneofDescriptor( + desc.name, + '.'.join((desc_name, desc.name)), + index, + None, + [], + _OptionsOrNone(desc), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for index, desc in enumerate(desc_proto.oneof_decl) + ] + extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] + if extension_ranges: + is_extendable = True + else: + is_extendable = False + desc = descriptor.Descriptor( + name=desc_proto.name, + full_name=desc_name, + filename=file_name, + containing_type=None, + fields=fields, + oneofs=oneofs, + nested_types=nested, + enum_types=enums, + extensions=extensions, + options=_OptionsOrNone(desc_proto), + is_extendable=is_extendable, + extension_ranges=extension_ranges, + file=file_desc, + serialized_start=None, + serialized_end=None, + syntax=syntax, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for nested in desc.nested_types: + nested.containing_type = desc + for enum in desc.enum_types: + enum.containing_type = desc + for field_index, field_desc in enumerate(desc_proto.field): + if field_desc.HasField('oneof_index'): + oneof_index = field_desc.oneof_index + oneofs[oneof_index].fields.append(fields[field_index]) + fields[field_index].containing_oneof = oneofs[oneof_index] + + scope[_PrefixWithDot(desc_name)] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._descriptors[desc_name] = desc + return desc + + def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, + containing_type=None, scope=None, top_level=False): + """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. + + Args: + enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the enum descriptor. + containing_type: The type containing this enum. + scope: Scope containing available types. + top_level: If True, the enum is a top level symbol. If False, the enum + is defined inside a message. 
+ + Returns: + The added descriptor + """ + + if package: + enum_name = '.'.join((package, enum_proto.name)) + else: + enum_name = enum_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + values = [self._MakeEnumValueDescriptor(value, index) + for index, value in enumerate(enum_proto.value)] + desc = descriptor.EnumDescriptor(name=enum_proto.name, + full_name=enum_name, + filename=file_name, + file=file_desc, + values=values, + containing_type=containing_type, + options=_OptionsOrNone(enum_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope['.%s' % enum_name] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._enum_descriptors[enum_name] = desc + + # Add top level enum values. + if top_level: + for value in values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, value.name))) + self._CheckConflictRegister(value, full_name, file_name) + self._top_enum_values[full_name] = value + + return desc + + def _MakeFieldDescriptor(self, field_proto, message_name, index, + file_desc, is_extension=False): + """Creates a field descriptor from a FieldDescriptorProto. + + For message and enum type fields, this method will do a look up + in the pool for the appropriate descriptor for that type. If it + is unavailable, it will fall back to the _source function to + create it. If this type is still unavailable, construction will + fail. + + Args: + field_proto: The proto describing the field. + message_name: The name of the containing message. + index: Index of the field + file_desc: The file containing the field descriptor. + is_extension: Indication that this field is for an extension. + + Returns: + An initialized FieldDescriptor object + """ + + if message_name: + full_name = '.'.join((message_name, field_proto.name)) + else: + full_name = field_proto.name + + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + + return descriptor.FieldDescriptor( + name=field_proto.name, + full_name=full_name, + index=index, + number=field_proto.number, + type=field_proto.type, + cpp_type=None, + message_type=None, + enum_type=None, + containing_type=None, + label=field_proto.label, + has_default_value=False, + default_value=None, + is_extension=is_extension, + extension_scope=None, + options=_OptionsOrNone(field_proto), + json_name=json_name, + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _SetAllFieldTypes(self, package, desc_proto, scope): + """Sets all the descriptor's fields's types. + + This method also sets the containing types on any extensions. + + Args: + package: The current package of desc_proto. + desc_proto: The message descriptor to update. + scope: Enclosing scope of available types. 
+ """ + + package = _PrefixWithDot(package) + + main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) + + if package == '.': + nested_package = _PrefixWithDot(desc_proto.name) + else: + nested_package = '.'.join([package, desc_proto.name]) + + for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): + self._SetFieldType(field_proto, field_desc, nested_package, scope) + + for extension_proto, extension_desc in ( + zip(desc_proto.extension, main_desc.extensions)): + extension_desc.containing_type = self._GetTypeFromScope( + nested_package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, nested_package, scope) + + for nested_type in desc_proto.nested_type: + self._SetAllFieldTypes(nested_package, nested_type, scope) + + def _SetFieldType(self, field_proto, field_desc, package, scope): + """Sets the field's type, cpp_type, message_type and enum_type. + + Args: + field_proto: Data about the field in proto format. + field_desc: The descriptor to modify. + package: The package the field's container is in. + scope: Enclosing scope of available types. + """ + if field_proto.type_name: + desc = self._GetTypeFromScope(package, field_proto.type_name, scope) + else: + desc = None + + if not field_proto.HasField('type'): + if isinstance(desc, descriptor.Descriptor): + field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE + else: + field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM + + field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( + field_proto.type) + + if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): + field_desc.message_type = desc + + if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.enum_type = desc + + if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: + field_desc.has_default_value = False + field_desc.default_value = [] + elif field_proto.HasField('default_value'): + field_desc.has_default_value = True + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = float(field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = field_proto.default_value + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = field_proto.default_value.lower() == 'true' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values_by_name[ + field_proto.default_value].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = text_encoding.CUnescape( + field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + else: + # All other types are of the "int" type. 
+ field_desc.default_value = int(field_proto.default_value) + else: + field_desc.has_default_value = False + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = 0.0 + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = u'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = False + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values[0].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = b'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: + field_desc.default_value = None + else: + # All other types are of the "int" type. + field_desc.default_value = 0 + + field_desc.type = field_proto.type + + def _MakeEnumValueDescriptor(self, value_proto, index): + """Creates an enum value descriptor object from an enum value proto. + + Args: + value_proto: The proto describing the enum value. + index: The index of the enum value. + + Returns: + An initialized EnumValueDescriptor object. + """ + + return descriptor.EnumValueDescriptor( + name=value_proto.name, + index=index, + number=value_proto.number, + options=_OptionsOrNone(value_proto), + type=None, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _MakeServiceDescriptor(self, service_proto, service_index, scope, + package, file_desc): + """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. + + Args: + service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. + service_index: The index of the service in the File. + scope: Dict mapping short and full symbols to message and enum types. + package: Optional package name for the new ServiceDescriptor. + file_desc: The file containing the service descriptor. + + Returns: + The added descriptor. + """ + + if package: + service_name = '.'.join((package, service_proto.name)) + else: + service_name = service_proto.name + + methods = [self._MakeMethodDescriptor(method_proto, service_name, package, + scope, index) + for index, method_proto in enumerate(service_proto.method)] + desc = descriptor.ServiceDescriptor( + name=service_proto.name, + full_name=service_name, + index=service_index, + methods=methods, + options=_OptionsOrNone(service_proto), + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._service_descriptors[service_name] = desc + return desc + + def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, + index): + """Creates a method descriptor from a MethodDescriptorProto. + + Args: + method_proto: The proto describing the method. + service_name: The name of the containing service. + package: Optional package name to look up for types. + scope: Scope containing available types. + index: Index of the method in the service. + + Returns: + An initialized MethodDescriptor object. 
+ """ + full_name = '.'.join((service_name, method_proto.name)) + input_type = self._GetTypeFromScope( + package, method_proto.input_type, scope) + output_type = self._GetTypeFromScope( + package, method_proto.output_type, scope) + return descriptor.MethodDescriptor( + name=method_proto.name, + full_name=full_name, + index=index, + containing_service=None, + input_type=input_type, + output_type=output_type, + client_streaming=method_proto.client_streaming, + server_streaming=method_proto.server_streaming, + options=_OptionsOrNone(method_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _ExtractSymbols(self, descriptors): + """Pulls out all the symbols from descriptor protos. + + Args: + descriptors: The messages to extract descriptors from. + Yields: + A two element tuple of the type name and descriptor object. + """ + + for desc in descriptors: + yield (_PrefixWithDot(desc.full_name), desc) + for symbol in self._ExtractSymbols(desc.nested_types): + yield symbol + for enum in desc.enum_types: + yield (_PrefixWithDot(enum.full_name), enum) + + def _GetDeps(self, dependencies, visited=None): + """Recursively finds dependencies for file protos. + + Args: + dependencies: The names of the files being depended on. + visited: The names of files already found. + + Yields: + Each direct and indirect dependency. + """ + + visited = visited or set() + for dependency in dependencies: + if dependency not in visited: + visited.add(dependency) + dep_desc = self.FindFileByName(dependency) + yield dep_desc + public_files = [d.name for d in dep_desc.public_dependencies] + yield from self._GetDeps(public_files, visited) + + def _GetTypeFromScope(self, package, type_name, scope): + """Finds a given type name in the current scope. + + Args: + package: The package the proto should be located in. + type_name: The name of the type to be found in the scope. + scope: Dict mapping short and full symbols to message and enum types. + + Returns: + The descriptor for the requested type. + """ + if type_name not in scope: + components = _PrefixWithDot(package).split('.') + while components: + possible_match = '.'.join(components + [type_name]) + if possible_match in scope: + type_name = possible_match + break + else: + components.pop(-1) + return scope[type_name] + + +def _PrefixWithDot(name): + return name if name.startswith('.') else '.%s' % name + + +if _USE_C_DESCRIPTORS: + # TODO(amauryfa): This pool could be constructed from Python code, when we + # support a flag like 'use_cpp_generated_pool=True'. + # pylint: disable=protected-access + _DEFAULT = descriptor._message.default_pool +else: + _DEFAULT = DescriptorPool() + + +def Default(): + return _DEFAULT diff --git a/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py new file mode 100644 index 0000000000..a8ecc07bdf --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/duration.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DURATION._serialized_start=51 + _DURATION._serialized_end=93 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py new file mode 100644 index 0000000000..0b4d554db3 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _EMPTY._serialized_start=48 + _EMPTY._serialized_end=55 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py new file mode 100644 index 0000000000..80a4e96e59 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
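+# Illustrative usage sketch (not part of the generated output; FieldMask has a
+# single repeated string field `paths`, per the serialized descriptor below):
+#
+#   from google.protobuf import field_mask_pb2
+#   mask = field_mask_pb2.FieldMask(paths=['user.display_name', 'user.email'])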
+# source: google/protobuf/field_mask.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _FIELDMASK._serialized_start=53 + _FIELDMASK._serialized_end=79 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py new file mode 100644 index 0000000000..afdbb78c36 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
+"""Adds support for parameterized tests to Python's unittest TestCase class.
+
+A parameterized test is a method in a test case that is invoked with different
+argument tuples.
+
+A simple example:
+
+  class AdditionExample(parameterized.TestCase):
+    @parameterized.parameters(
+       (1, 2, 3),
+       (4, 5, 9),
+       (1, 1, 3))
+    def testAddition(self, op1, op2, result):
+      self.assertEqual(result, op1 + op2)
+
+
+Each invocation is a separate test case and properly isolated just
+like a normal test method, with its own setUp/tearDown cycle. In the
+example above, there are three separate testcases, one of which will
+fail due to an assertion error (1 + 1 != 3).
+
+Parameters for individual test cases can be tuples (with positional parameters)
+or dictionaries (with named parameters):
+
+  class AdditionExample(parameterized.TestCase):
+    @parameterized.parameters(
+       {'op1': 1, 'op2': 2, 'result': 3},
+       {'op1': 4, 'op2': 5, 'result': 9},
+    )
+    def testAddition(self, op1, op2, result):
+      self.assertEqual(result, op1 + op2)
+
+If a parameterized test fails, the error message will show the
+original test name (which is modified internally) and the arguments
+for the specific invocation, which are part of the string returned by
+the shortDescription() method on test cases.
+
+The id method of the test, used internally by the unittest framework,
+is also modified to show the arguments. To make sure that test names
+stay the same across several invocations, object representations like
+
+  >>> class Foo(object):
+  ...  pass
+  >>> repr(Foo())
+  '<__main__.Foo object at 0x23d8610>'
+
+are turned into '<__main__.Foo>'. For even more descriptive names,
+especially in test logs, you can use the named_parameters decorator. In
+this case, only tuples are supported, and the first parameter has to
+be a string (or an object that returns an apt name when converted via
+str()):
+
+  class NamedExample(parameterized.TestCase):
+    @parameterized.named_parameters(
+       ('Normal', 'aa', 'aaa', True),
+       ('EmptyPrefix', '', 'abc', True),
+       ('BothEmpty', '', '', True))
+    def testStartsWith(self, prefix, string, result):
+      self.assertEqual(result, string.startswith(prefix))
+
+Named tests also have the benefit that they can be run individually
+from the command line:
+
+  $ testmodule.py NamedExample.testStartsWithNormal
+  .
+  --------------------------------------------------------------------
+  Ran 1 test in 0.000s
+
+  OK
+
+Parameterized Classes
+=====================
+If invocation arguments are shared across test methods in a single
+TestCase class, instead of decorating all test methods
+individually, the class itself can be decorated:
+
+  @parameterized.parameters(
+    (1, 2, 3),
+    (4, 5, 9))
+  class ArithmeticTest(parameterized.TestCase):
+    def testAdd(self, arg1, arg2, result):
+      self.assertEqual(arg1 + arg2, result)
+
+    def testSubtract(self, arg1, arg2, result):
+      self.assertEqual(result - arg1, arg2)
+
+Inputs from Iterables
+=====================
+If parameters should be shared across several test cases, or are dynamically
+created from other sources, a single non-tuple iterable can be passed into
+the decorator. This iterable will be used to obtain the test cases:
+
+  class AdditionExample(parameterized.TestCase):
+    @parameterized.parameters(
+      (c.op1, c.op2, c.result) for c in testcases
+    )
+    def testAddition(self, op1, op2, result):
+      self.assertEqual(result, op1 + op2)
+
+
+Single-Argument Test Methods
+============================
+If a test method takes only one argument, the single argument does not need to
+be wrapped into a tuple:
+
+  class NegativeNumberExample(parameterized.TestCase):
+    @parameterized.parameters(
+       -1, -3, -4, -5
+    )
+    def testIsNegative(self, arg):
+      self.assertTrue(IsNegative(arg))
+"""
+
+__author__ = 'tmarek@google.com (Torsten Marek)'
+
+import functools
+import re
+import types
+import unittest
+import uuid
+
+try:
+  # Since python 3
+  import collections.abc as collections_abc
+except ImportError:
+  # Won't work after python 3.8
+  import collections as collections_abc
+
+ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
+_SEPARATOR = uuid.uuid1().hex
+_FIRST_ARG = object()
+_ARGUMENT_REPR = object()
+
+
+def _CleanRepr(obj):
+  return ADDR_RE.sub(r'<\1>', repr(obj))
+
+
+# Helper function formerly from the unittest module, removed from it in
+# Python 2.7.
+def _StrClass(cls):
+  return '%s.%s' % (cls.__module__, cls.__name__)
+
+
+def _NonStringIterable(obj):
+  return (isinstance(obj, collections_abc.Iterable) and
+          not isinstance(obj, str))
+
+
+def _FormatParameterList(testcase_params):
+  if isinstance(testcase_params, collections_abc.Mapping):
+    return ', '.join('%s=%s' % (argname, _CleanRepr(value))
+                     for argname, value in testcase_params.items())
+  elif _NonStringIterable(testcase_params):
+    return ', '.join(map(_CleanRepr, testcase_params))
+  else:
+    return _FormatParameterList((testcase_params,))
+
+
+class _ParameterizedTestIter(object):
+  """Callable and iterable class for producing new test cases."""
+
+  def __init__(self, test_method, testcases, naming_type):
+    """Returns concrete test functions for a test and a list of parameters.
+
+    The naming_type is used to determine the name of the concrete
+    functions as reported by the unittest framework. If naming_type is
+    _FIRST_ARG, the testcases must be tuples, and the first element must
+    have a string representation that is a valid Python identifier.
+
+    Args:
+      test_method: The decorated test method.
+      testcases: (list of tuple/dict) A list of parameter
+                 tuples/dicts for individual test invocations.
+      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
+    """
+    self._test_method = test_method
+    self.testcases = testcases
+    self._naming_type = naming_type
+
+  def __call__(self, *args, **kwargs):
+    raise RuntimeError('You appear to be running a parameterized test case '
+                       'without having inherited from parameterized.'
+                       'TestCase. This is bad because none of '
+                       'your test cases are actually being run.')
+
+  def __iter__(self):
+    test_method = self._test_method
+    naming_type = self._naming_type
+
+    def MakeBoundParamTest(testcase_params):
+      @functools.wraps(test_method)
+      def BoundParamTest(self):
+        if isinstance(testcase_params, collections_abc.Mapping):
+          test_method(self, **testcase_params)
+        elif _NonStringIterable(testcase_params):
+          test_method(self, *testcase_params)
+        else:
+          test_method(self, testcase_params)
+
+      if naming_type is _FIRST_ARG:
+        # Signal the metaclass that the name of the test function is unique
+        # and descriptive.
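+        # Illustration (mirrors the module docstring's NamedExample): the
+        # tuple ('Normal', 'aa', 'aaa', True) yields a test method named
+        # 'testStartsWithNormal' that is invoked with ('aa', 'aaa', True).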
+ BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. + """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections_abc.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def named_parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. 
+ + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjunction with the + parameters decorator. + """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): + """Base class for test cases using the parameters decorator.""" + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. + + This enables the TestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopTestCase', + (other_base_class, TestCase), {}) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000..7fef237670 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py @@ -0,0 +1,112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys +import warnings + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + + +_default_implementation_type = ('cpp' if _api_version > 0 else 'python') + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. 
That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +def _SetType(implementation_type): + """Never use! Only for protobuf benchmark.""" + global _implementation_type + _implementation_type = implementation_type + + +# See comment on 'Type' above. +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py new file mode 100644 index 0000000000..64353ee4af --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. 
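+
+For example, a generated module such as duration_pb2.py calls, in order
+(with `serialized_pb` standing in for the generated bytes literal):
+
+  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(serialized_pb)
+  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+  _builder.BuildTopDescriptorsAndMessages(
+      DESCRIPTOR, 'google.protobuf.duration_pb2', globals())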
+""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. + + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. + for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. 
+ + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service as _service + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (_service.Service,), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py new file mode 100644 index 0000000000..29fbb53d2f --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py @@ -0,0 +1,710 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + +- Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). +- Repeated composite fields - Repeated fields which are composite. This + includes groups and nested messages. 
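+
+From user code, both behave like ordinary Python sequences. An illustrative
+sketch (the field names are hypothetical):
+
+  msg.repeated_int32.append(1)      # scalar values are type-checked on insert
+  msg.repeated_int32.extend([2, 3])
+  element = msg.repeated_msg.add()  # composite fields create elements via add()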
+""" + +import collections.abc +import copy +import pickle +from typing import ( + Any, + Iterable, + Iterator, + List, + MutableMapping, + MutableSequence, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + overload, +) + + +_T = TypeVar('_T') +_K = TypeVar('_K') +_V = TypeVar('_V') + + +class BaseContainer(Sequence[_T]): + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener: Any) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + @overload + def __getitem__(self, key: int) -> _T: + ... + + @overload + def __getitem__(self, key: slice) -> List[_T]: + ... + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self) -> int: + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other: Any) -> bool: + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + __hash__ = None + + def __repr__(self) -> str: + return repr(self._values) + + def sort(self, *args, **kwargs) -> None: + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + def reverse(self) -> None: + self._values.reverse() + + +# TODO(slebedev): Remove this. BaseContainer does *not* conform to +# MutableSequence, only its subclasses do. +collections.abc.MutableSequence.register(BaseContainer) + + +class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__( + self, + message_listener: Any, + type_checker: Any, + ) -> None: + """Args: + + message_listener: A MessageListener implementation. The + RepeatedScalarFieldContainer will call this object's Modified() method + when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super().__init__(message_listener) + self._type_checker = type_checker + + def append(self, value: _T) -> None: + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given iterable. Similar to list.extend().""" + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. 
b/18413862
+        return
+      raise
+
+    new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
+    if new_values:
+      self._values.extend(new_values)
+      self._message_listener.Modified()
+
+  def MergeFrom(
+      self,
+      other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
+  ) -> None:
+    """Appends the contents of another repeated field of the same type to this
+    one. We do not check the types of the individual fields.
+    """
+    self._values.extend(other)
+    self._message_listener.Modified()
+
+  def remove(self, elem: _T):
+    """Removes an item from the list. Similar to list.remove()."""
+    self._values.remove(elem)
+    self._message_listener.Modified()
+
+  def pop(self, key: Optional[int] = -1) -> _T:
+    """Removes and returns an item at a given index. Similar to list.pop()."""
+    value = self._values[key]
+    self.__delitem__(key)
+    return value
+
+  @overload
+  def __setitem__(self, key: int, value: _T) -> None:
+    ...
+
+  @overload
+  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
+    ...
+
+  def __setitem__(self, key, value) -> None:
+    """Sets the item on the specified position."""
+    if isinstance(key, slice):
+      if key.step is not None:
+        raise ValueError('Extended slices not supported')
+      self._values[key] = map(self._type_checker.CheckValue, value)
+      self._message_listener.Modified()
+    else:
+      self._values[key] = self._type_checker.CheckValue(value)
+      self._message_listener.Modified()
+
+  def __delitem__(self, key: Union[int, slice]) -> None:
+    """Deletes the item at the specified position."""
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __eq__(self, other: Any) -> bool:
+    """Compares the current instance with another one."""
+    if self is other:
+      return True
+    # Special case for the same type which should be common and fast.
+    if isinstance(other, self.__class__):
+      return other._values == self._values
+    # We are presumably comparing against some other sequence type.
+    return other == self._values
+
+  def __deepcopy__(
+      self,
+      unused_memo: Any = None,
+  ) -> 'RepeatedScalarFieldContainer[_T]':
+    clone = RepeatedScalarFieldContainer(
+        copy.deepcopy(self._message_listener), self._type_checker)
+    clone.MergeFrom(self)
+    return clone
+
+  def __reduce__(self, **kwargs) -> NoReturn:
+    raise pickle.PickleError(
+        "Can't pickle repeated scalar fields, convert to list first")
+
+
+# TODO(slebedev): Constrain T to be a subtype of Message.
+class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
+  """Simple, list-like container for holding repeated composite fields."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_message_descriptor']
+
+  def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
+    """
+    Note that we pass in a descriptor instead of the generated class, since
+    at the time we construct a _RepeatedCompositeFieldContainer we haven't yet
+    necessarily initialized the type that will be contained in the container.
+
+    Args:
+      message_listener: A MessageListener implementation.
+        The RepeatedCompositeFieldContainer will call this object's
+        Modified() method when it is modified.
+      message_descriptor: A Descriptor instance describing the protocol type
+        that should be present in this container. We'll use the
+        _concrete_class field of this descriptor when the client calls add().
+    """
+    super().__init__(message_listener)
+    self._message_descriptor = message_descriptor
+
+  def add(self, **kwargs: Any) -> _T:
+    """Adds a new element at the end of the list and returns it.
Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def append(self, value: _T) -> None: + """Appends one element by copying the message.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position by copying.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.insert(key, new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given sequence of elements of the same type + + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other) + + def remove(self, elem: _T) -> None: + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value): + # This method is implemented to make RepeatedCompositeFieldContainer + # structurally compatible with typing.MutableSequence. It is + # otherwise unsupported and will always raise an error. + raise TypeError( + f'{self.__class__.__name__} object does not support item assignment') + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. 
+  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
+               '_entry_descriptor']
+
+  def __init__(
+      self,
+      message_listener: Any,
+      key_checker: Any,
+      value_checker: Any,
+      entry_descriptor: Any,
+  ) -> None:
+    """
+    Args:
+      message_listener: A MessageListener implementation.
+        The ScalarMap will call this object's Modified() method when it
+        is modified.
+      key_checker: A type_checkers.ValueChecker instance to run on keys
+        inserted into this container.
+      value_checker: A type_checkers.ValueChecker instance to run on values
+        inserted into this container.
+      entry_descriptor: The MessageDescriptor of a map entry: key and value.
+    """
+    self._message_listener = message_listener
+    self._key_checker = key_checker
+    self._value_checker = value_checker
+    self._entry_descriptor = entry_descriptor
+    self._values = {}
+
+  def __getitem__(self, key: _K) -> _V:
+    try:
+      return self._values[key]
+    except KeyError:
+      key = self._key_checker.CheckValue(key)
+      val = self._value_checker.DefaultValue()
+      self._values[key] = val
+      return val
+
+  def __contains__(self, item: _K) -> bool:
+    # We check the key's type to match the strong-typing flavor of the API.
+    # Also this makes it easier to match the behavior of the C++ implementation.
+    self._key_checker.CheckValue(item)
+    return item in self._values
+
+  @overload
+  def get(self, key: _K) -> Optional[_V]:
+    ...
+
+  @overload
+  def get(self, key: _K, default: _T) -> Union[_V, _T]:
+    ...
+
+  # We need to override this explicitly, because our defaultdict-like behavior
+  # will make the default implementation (from our base class) always insert
+  # the key.
+  def get(self, key, default=None):
+    if key in self:
+      return self[key]
+    else:
+      return default
+
+  def __setitem__(self, key: _K, value: _V) -> None:
+    checked_key = self._key_checker.CheckValue(key)
+    checked_value = self._value_checker.CheckValue(value)
+    self._values[checked_key] = checked_value
+    self._message_listener.Modified()
+
+  def __delitem__(self, key: _K) -> None:
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __len__(self) -> int:
+    return len(self._values)
+
+  def __iter__(self) -> Iterator[_K]:
+    return iter(self._values)
+
+  def __repr__(self) -> str:
+    return repr(self._values)
+
+  def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
+    self._values.update(other._values)
+    self._message_listener.Modified()
+
+  def InvalidateIterators(self) -> None:
+    # It appears that the only way to reliably invalidate iterators to
+    # self._values is to ensure that its size changes.
+    original = self._values
+    self._values = original.copy()
+    original[None] = None
+
+  # This is defined in the abstract base, but we can do it much more cheaply.
+  def clear(self) -> None:
+    self._values.clear()
+    self._message_listener.Modified()
+
+  def GetEntryClass(self) -> Any:
+    return self._entry_descriptor._concrete_class
+
+
+class MessageMap(MutableMapping[_K, _V]):
+  """Simple, type-checked, dict-like container with submessage values."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_key_checker', '_values', '_message_listener',
+               '_message_descriptor', '_entry_descriptor']
+
+  def __init__(
+      self,
+      message_listener: Any,
+      message_descriptor: Any,
+      key_checker: Any,
+      entry_descriptor: Any,
+  ) -> None:
+    """
+    Args:
+      message_listener: A MessageListener implementation.
+        The MessageMap will call this object's Modified() method when it
+        is modified.
+      key_checker: A type_checkers.ValueChecker instance to run on keys
+        inserted into this container.
+      message_descriptor: A Descriptor instance describing the protocol type
+        of the values stored in this container.
+      entry_descriptor: The MessageDescriptor of a map entry: key and value.
+    """
+    self._message_listener = message_listener
+    self._message_descriptor = message_descriptor
+    self._key_checker = key_checker
+    self._entry_descriptor = entry_descriptor
+    self._values = {}
+
+  def __getitem__(self, key: _K) -> _V:
+    key = self._key_checker.CheckValue(key)
+    try:
+      return self._values[key]
+    except KeyError:
+      new_element = self._message_descriptor._concrete_class()
+      new_element._SetListener(self._message_listener)
+      self._values[key] = new_element
+      self._message_listener.Modified()
+      return new_element
+
+  def get_or_create(self, key: _K) -> _V:
+    """get_or_create() is an alias for getitem (i.e. map[key]).
+
+    Args:
+      key: The key to get or create in the map.
+
+    This is useful in cases where you want to be explicit that the call is
+    mutating the map. This can avoid lint errors for statements like this
+    that otherwise would appear to be pointless statements:
+
+      msg.my_map[key]
+    """
+    return self[key]
+
+  @overload
+  def get(self, key: _K) -> Optional[_V]:
+    ...
+
+  @overload
+  def get(self, key: _K, default: _T) -> Union[_V, _T]:
+    ...
+
+  # We need to override this explicitly, because our defaultdict-like behavior
+  # will make the default implementation (from our base class) always insert
+  # the key.
+  def get(self, key, default=None):
+    if key in self:
+      return self[key]
+    else:
+      return default
+
+  def __contains__(self, item: _K) -> bool:
+    item = self._key_checker.CheckValue(item)
+    return item in self._values
+
+  def __setitem__(self, key: _K, value: _V) -> NoReturn:
+    raise ValueError('May not set values directly, call my_map[key].foo = 5')
+
+  def __delitem__(self, key: _K) -> None:
+    key = self._key_checker.CheckValue(key)
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __len__(self) -> int:
+    return len(self._values)
+
+  def __iter__(self) -> Iterator[_K]:
+    return iter(self._values)
+
+  def __repr__(self) -> str:
+    return repr(self._values)
+
+  def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
+    # pylint: disable=protected-access
+    for key in other._values:
+      # According to documentation: "When parsing from the wire or when merging,
+      # if there are duplicate map keys the last key seen is used".
+      if key in self:
+        del self[key]
+      self[key].CopyFrom(other[key])
+    # self._message_listener.Modified() not required here, because
+    # mutations to submessages already propagate.
+
+  def InvalidateIterators(self) -> None:
+    # It appears that the only way to reliably invalidate iterators to
+    # self._values is to ensure that its size changes.
+    original = self._values
+    self._values = original.copy()
+    original[None] = None
+
+  # This is defined in the abstract base, but we can do it much more cheaply.
+  def clear(self) -> None:
+    self._values.clear()
+    self._message_listener.Modified()
+
+  def GetEntryClass(self) -> Any:
+    return self._entry_descriptor._concrete_class
+
+
+class _UnknownField:
+  """A parsed unknown field."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_field_number', '_wire_type', '_data']
+
+  def __init__(self, field_number, wire_type, data):
+    self._field_number = field_number
+    self._wire_type = wire_type
+    self._data = data
+    return
+
+  def __lt__(self, other):
+    # pylint: disable=protected-access
+    return self._field_number < other._field_number
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    # pylint: disable=protected-access
+    return (self._field_number == other._field_number and
+            self._wire_type == other._wire_type and
+            self._data == other._data)
+
+
+class UnknownFieldRef:  # pylint: disable=missing-class-docstring
+
+  def __init__(self, parent, index):
+    self._parent = parent
+    self._index = index
+
+  def _check_valid(self):
+    if not self._parent:
+      raise ValueError('UnknownField does not exist. '
+                       'The parent message might be cleared.')
+    if self._index >= len(self._parent):
+      raise ValueError('UnknownField does not exist. '
+                       'The parent message might be cleared.')
+
+  @property
+  def field_number(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._field_number
+
+  @property
+  def wire_type(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._wire_type
+
+  @property
+  def data(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._data
+
+
+class UnknownFieldSet:
+  """UnknownField container"""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_values']
+
+  def __init__(self):
+    self._values = []
+
+  def __getitem__(self, index):
+    if self._values is None:
+      raise ValueError('UnknownFields does not exist. '
+                       'The parent message might be cleared.')
+    size = len(self._values)
+    if index < 0:
+      index += size
+    if index < 0 or index >= size:
+      raise IndexError('index %d out of range' % index)
+
+    return UnknownFieldRef(self, index)
+
+  def _internal_get(self, index):
+    return self._values[index]
+
+  def __len__(self):
+    if self._values is None:
+      raise ValueError('UnknownFields does not exist. '
+                       'The parent message might be cleared.')
+    return len(self._values)
+
+  def _add(self, field_number, wire_type, data):
+    unknown_field = _UnknownField(field_number, wire_type, data)
+    self._values.append(unknown_field)
+    return unknown_field
+
+  def __iter__(self):
+    for i in range(len(self)):
+      yield UnknownFieldRef(self, i)
+
+  def _extend(self, other):
+    if other is None:
+      return
+    # pylint: disable=protected-access
+    self._values.extend(other._values)
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    # Sort unknown fields because their order shouldn't
+    # affect equality test.
+    values = list(self._values)
+    if other is None:
+      return not values
+    values.sort()
+    # pylint: disable=protected-access
+    other_values = sorted(other._values)
+    return values == other_values
+
+  def _clear(self):
+    for value in self._values:
+      # pylint: disable=protected-access
+      if isinstance(value._data, UnknownFieldSet):
+        value._data._clear()  # pylint: disable=protected-access
+    self._values = None
diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py
new file mode 100644
index 0000000000..bc1b7b785c
--- /dev/null
+++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py
@@ -0,0 +1,1029 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+  Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+  buffer: The string containing the encoded message.
+  pos: The current position in the string.
+  end: The position in the string where the current message ends. May be
+    less than len(buffer) if we're reading a sub-message.
+  message: The message object into which we're parsing.
+  field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position. A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+  IndexError: Indicates a truncated message.
+  struct.error: Unpacking of a fixed-width field failed.
+  message.DecodeError: Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking: it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+  field_number: The field number of the field we want to decode.
+  is_repeated: Is the field a repeated field? (bool)
+  is_packed: Is the field a packed field? (bool)
+  key: The key to use when looking up the field within field_dict.
+    (This is actually the FieldDescriptor but nothing in this
+    file should depend on that.)
+  new_default: A function which takes a message object as a parameter and
+    returns a new instance of the default value for this field.
+ (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return a decoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. + """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type.
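# Standalone sketch (editorial, not vendored code) of the varint encoding the
# decoders above consume: seven payload bits per byte, least-significant group
# first, continuation bit 0x80 on every byte but the last.
def _sketch_decode_varint(buf, pos):
    result = shift = 0
    while True:
        b = buf[pos]
        result |= (b & 0x7F) << shift
        pos += 1
        if not b & 0x80:
            return result, pos
        shift += 7

# 300 = 0b10_0101100 -> 0xAC (low 7 bits plus continuation bit), then 0x02.
assert _sketch_decode_varint(b'\xac\x02', 0) == (300, 2)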
+ + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. 
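# Worked bytes for the packed branch of _SimpleDecoder above (sketch with
# hypothetical data): a packed repeated int32 field arrives as one
# length-delimited blob. Tag 0x22 = (field 4 << 3) | wire type 2, then a
# length varint, then back-to-back value varints.
buf = bytes([0x22, 0x03, 0x01, 0x02, 0x03])
pos = 1                        # past the one-byte tag
size, pos = buf[pos], pos + 1  # the length varint fits in one byte here
endpoint, values = pos + size, []
while pos < endpoint:          # same endpoint loop as DecodePackedField
    values.append(buf[pos])    # each element fits in one varint byte here
    pos += 1
assert values == [1, 2, 3]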
+ + Args: + buffer: memoryview of the serialized bytes + pos: int, position in the memory view to start at. + + Returns: + Tuple[float, int] of the deserialized float value and new position + in the serialized data. + """ + # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign + # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. + new_pos = pos + 4 + float_bytes = buffer[pos:new_pos].tobytes() + + # If this value has all its exponent bits set, then it's non-finite. + # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. + # To avoid that, we parse it specially. + if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): + # If at least one significand bit is set... + if float_bytes[0:3] != b'\x00\x00\x80': + return (math.nan, new_pos) + # If sign bit is set... + if float_bytes[3:4] == b'\xFF': + return (-math.inf, new_pos) + return (math.inf, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack('<f', float_bytes)[0] + return (result, new_pos) + return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode) + + +def _DoubleDecoder(): + """Returns a decoder for a double field. + + This code works around a bug in struct.unpack for non-finite 64-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized double to a double and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + + Returns: + Tuple[float, int] of the decoded double value and new position + in the serialized data. + """ + # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign + # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand. + new_pos = pos + 8 + double_bytes = buffer[pos:new_pos].tobytes() + + # If this value has all its exponent bits set and at least one significand + # bit set, it's not a number. In Python 2.4, struct.unpack will treat it + # as inf or -inf. To avoid that, we treat it specially. + if ((double_bytes[7:8] in b'\x7F\xFF') + and (double_bytes[6:7] >= b'\xF0') + and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): + return (math.nan, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack('<d', double_bytes)[0] + return (result, new_pos) + return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode) + + +def EnumDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for enum field.""" + enum_type = key.enum_type + + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + """Decode serialized packed enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + value_start_pos = pos + (element, pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + if pos > endpoint: + if element in enum_type.values_by_number: + del value[-1] # Discard corrupt value. + else: + del message._unknown_fields[-1] + # pylint: disable=protected-access + del message._unknown_field_set._values[-1] + # pylint: enable=protected-access + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data.
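# The non-finite special cases in _FloatDecoder/_DoubleDecoder above test for
# exact little-endian IEEE-754 bit patterns; a quick standalone check of those
# patterns (editorial sketch, not vendored code):
import math
import struct

assert struct.pack('<f', math.inf) == b'\x00\x00\x80\x7f'
assert struct.pack('<f', -math.inf) == b'\x00\x00\x80\xff'
nan_bytes = struct.pack('<f', math.nan)
assert nan_bytes[3] & 0x7f == 0x7f and nan_bytes[2] & 0x80  # exponent all ones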
+ """ + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (tag_bytes, buffer[pos:new_pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value_start_pos = pos + (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not enum_value: + field_dict.pop(key, None) + return pos + # pylint: disable=protected-access + if enum_value in enum_type.values_by_number: + field_dict[key] = enum_value + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, enum_value) + # pylint: enable=protected-access + return pos + return DecodeField + + +# -------------------------------------------------------------------- + + +Int32Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) + +Int64Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) + +UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) +UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) + +SInt32Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) +SInt64Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: + raise _DecodeError('Truncated string.') + value.append(_ConvertToUnicode(buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. 
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + message._FACTORY.GetPrototype(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + type_id, + wire_format.WIRETYPE_LENGTH_DELIMITED, + buffer[message_start:message_end].tobytes()) + # pylint: enable=protected-access + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. + message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. 
Returns the new position.""" + # Previously ord(buffer[pos]) raised IndexError when pos is out of range. + # With this code, ord(b'') raises TypeError. Both are handled in + # python_message.py to generate a 'Truncated message' error. + while ord(buffer[pos:pos+1].tobytes()) & 0x80: + pos += 1 + pos += 1 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipFixed64(buffer, pos, end): + """Skip a fixed64 value. Returns the new position.""" + + pos += 8 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed64(buffer, pos): + """Decode a fixed64.""" + new_pos = pos + 8 + return (struct.unpack(' end: + raise _DecodeError('Truncated message.') + return pos + + +def _SkipGroup(buffer, pos, end): + """Skip sub-group. Returns the new position.""" + + while 1: + (tag_bytes, pos) = ReadTag(buffer, pos) + new_pos = SkipField(buffer, pos, end, tag_bytes) + if new_pos == -1: + return pos + pos = new_pos + + +def _DecodeUnknownFieldSet(buffer, pos, end_pos=None): + """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position.""" + + unknown_field_set = containers.UnknownFieldSet() + while end_pos is None or pos < end_pos: + (tag_bytes, pos) = ReadTag(buffer, pos) + (tag, _) = _DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if wire_type == wire_format.WIRETYPE_END_GROUP: + break + (data, pos) = _DecodeUnknownField(buffer, pos, wire_type) + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + + return (unknown_field_set, pos) + + +def _DecodeUnknownField(buffer, pos, wire_type): + """Decode a unknown field. Returns the UnknownField and new position.""" + + if wire_type == wire_format.WIRETYPE_VARINT: + (data, pos) = _DecodeVarint(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED64: + (data, pos) = _DecodeFixed64(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED32: + (data, pos) = _DecodeFixed32(buffer, pos) + elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED: + (size, pos) = _DecodeVarint(buffer, pos) + data = buffer[pos:pos+size].tobytes() + pos += size + elif wire_type == wire_format.WIRETYPE_START_GROUP: + (data, pos) = _DecodeUnknownFieldSet(buffer, pos) + elif wire_type == wire_format.WIRETYPE_END_GROUP: + return (0, -1) + else: + raise _DecodeError('Wrong wire type in tag.') + + return (data, pos) + + +def _EndGroup(buffer, pos, end): + """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" + + return -1 + + +def _SkipFixed32(buffer, pos, end): + """Skip a fixed32 value. 
Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack('B').pack + + def EncodeVarint(write, value, unused_deterministic=None): + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeVarint + + +def _SignedVarintEncoder(): + """Return an encoder for a basic signed varint value (does not include + tag).""" + + local_int2byte = struct.Struct('>B').pack + + def EncodeSignedVarint(write, value, unused_deterministic=None): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeSignedVarint + + +_EncodeVarint = _VarintEncoder() +_EncodeSignedVarint = _SignedVarintEncoder() + + +def _VarintBytes(value): + """Encode the given integer as a varint and return the bytes. This is only + called at startup time so it doesn't need to be fast.""" + + pieces = [] + _EncodeVarint(pieces.append, value, True) + return b"".join(pieces) + + +def TagBytes(field_number, wire_type): + """Encode the given tag and return the bytes. Only called at startup.""" + + return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) + +# -------------------------------------------------------------------- +# As with sizers (see above), we have a number of common encoder +# implementations. + + +def _SimpleEncoder(wire_type, encode_value, compute_value_size): + """Return a constructor for an encoder for fields of a particular type. + + Args: + wire_type: The field's wire type, for encoding tags. + encode_value: A function which encodes an individual value, e.g. + _EncodeVarint(). + compute_value_size: A function which computes the size of an individual + value, e.g. _VarintSize(). + """ + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(element) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, element, deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, element, deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, value, deterministic) + return EncodeField + + return SpecificEncoder + + +def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): + """Like SimpleEncoder but additionally invokes modify_value on every value + before passing it to encode_value. 
Usually modify_value is ZigZagEncode.""" + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(modify_value(element)) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, modify_value(element), deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, modify_value(element), deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, modify_value(value), deterministic) + return EncodeField + + return SpecificEncoder + + +def _StructPackEncoder(wire_type, format): + """Return a constructor for an encoder for a fixed-width field. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + write(local_struct_pack(format, element)) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + write(local_struct_pack(format, element)) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + return write(local_struct_pack(format, value)) + return EncodeField + + return SpecificEncoder + + +def _FloatingPointEncoder(wire_type, format): + """Return a constructor for an encoder for float fields. + + This is like StructPackEncoder, but catches errors that may be due to + passing non-finite floating-point values to struct.pack, and makes a + second attempt to encode those values. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
+ if value == _POS_INF: + write(b'\x00\x00\x80\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x80\xFF') + elif value != value: # NaN + write(b'\x00\x00\xC0\x7F') + else: + raise + elif value_size == 8: + def EncodeNonFiniteOrRaise(write, value): + if value == _POS_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') + elif value != value: # NaN + write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') + else: + raise + else: + raise ValueError('Can\'t encode floating-point values that are ' + '%d bytes long (only 4 or 8)' % value_size) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + # This try/except block is going to be faster than any code that + # we could write to check whether element is finite. + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + try: + write(local_struct_pack(format, value)) + except SystemError: + EncodeNonFiniteOrRaise(write, value) + return EncodeField + + return SpecificEncoder + + +# ==================================================================== +# Here we declare an encoder constructor for each field type. These work +# very similarly to sizer constructors, described earlier. + + +Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) + +UInt32Encoder = UInt64Encoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) + +SInt32Encoder = SInt64Encoder = _ModifiedEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, + wire_format.ZigZagEncode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str + ValueType = int + + def __init__(self, enum_type): + """Inits EnumTypeWrapper with an EnumDescriptor.""" + self._enum_type = enum_type + self.DESCRIPTOR = enum_type # pylint: disable=invalid-name + + def Name(self, number): # pylint: disable=invalid-name + """Returns a string containing the name of an enum value.""" + try: + return self._enum_type.values_by_number[number].name + except KeyError: + pass # fall out to break exception chaining + + if not isinstance(number, int): + raise TypeError( + 'Enum value for {} must be an int, but got {} {!r}.'.format( + self._enum_type.name, type(number), number)) + else: + # repr here to handle the odd case when you pass in a boolean. 
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. + """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000..b346cf283e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py @@ -0,0 +1,213 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO(robinson): Unify error handling of "unknown extension" crap. +# TODO(robinson): Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + self._extended_message._FACTORY.GetPrototype(message_type) + assert getattr(extension_handle.message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (extension_handle.full_name, + extension_handle.message_type.full_name)) + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. 
This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. + """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + return self._extended_message._extensions_by_name.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
+ """ + return self._extended_message._extensions_by_number.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000..0fc255a774 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. 
This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. + """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 0000000000..63651a3f19 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageSet.RegisterExtension(message_set_extension3) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + + DESCRIPTOR._options = None + _TESTMESSAGESET._options = None + _TESTMESSAGESET._serialized_options = b'\010\001' + _TESTMESSAGESET._serialized_start=83 + _TESTMESSAGESET._serialized_end=113 + _TESTMESSAGESETEXTENSION1._serialized_start=116 + _TESTMESSAGESETEXTENSION1._serialized_end=281 + _TESTMESSAGESETEXTENSION2._serialized_start=284 + _TESTMESSAGESETEXTENSION2._serialized_end=451 + _TESTMESSAGESETEXTENSION3._serialized_start=453 + _TESTMESSAGESETEXTENSION3._serialized_end=493 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 0000000000..5497083197 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + 
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + _TESTENUMVALUES._serialized_start=88 + _TESTENUMVALUES._serialized_end=409 + _TESTENUMVALUES_NESTEDENUM._serialized_start=378 + _TESTENUMVALUES_NESTEDENUM._serialized_end=409 + _TESTMISSINGENUMVALUES._serialized_start=412 + _TESTMISSINGENUMVALUES._serialized_end=751 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 + _JUSTSTRING._serialized_start=753 + _JUSTSTRING._serialized_end=780 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 0000000000..0953706bac --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) + + DESCRIPTOR._options = None + _DYNAMICMESSAGETYPE._serialized_start=132 + _DYNAMICMESSAGETYPE._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 0000000000..1cfa1b7c8b --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + ExtendedMessage.RegisterExtension(optional_int_extension) + ExtendedMessage.RegisterExtension(optional_message_extension) + ExtendedMessage.RegisterExtension(repeated_int_extension) + ExtendedMessage.RegisterExtension(repeated_message_extension) + + DESCRIPTOR._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None + _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' + _NESTEDMESSAGE.fields_by_name['submessage']._options = None + _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE._serialized_start=77 + _TOPLEVELMESSAGE._serialized_end=230 + _NESTEDMESSAGE._serialized_start=232 + _NESTEDMESSAGE._serialized_end=314 + _EXTENDEDMESSAGE._serialized_start=316 + _EXTENDEDMESSAGE._serialized_end=391 + _FOREIGNMESSAGE._serialized_start=393 + _FOREIGNMESSAGE._serialized_end=438 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 0000000000..d7f7115609 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + OutOfOrderFields.RegisterExtension(optional_uint64) + OutOfOrderFields.RegisterExtension(optional_int64) + globals()['class'].RegisterExtension(globals()['continue']) + getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) + globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) + + DESCRIPTOR._options = None + _IS._serialized_start=2669 + _IS._serialized_end=2696 + _OUTOFORDERFIELDS._serialized_start=74 + _OUTOFORDERFIELDS._serialized_end=178 + _CLASS._serialized_start=181 + _CLASS._serialized_end=514 + _CLASS_TRY._serialized_start=448 + _CLASS_TRY._serialized_end=476 + _CLASS_FOR._serialized_start=478 + _CLASS_FOR._serialized_end=506 + _EXTENDCLASS._serialized_start=516 + _EXTENDCLASS._serialized_end=579 + _TESTFULLKEYWORD._serialized_start=581 + _TESTFULLKEYWORD._serialized_end=707 + _LOTSNESTEDMESSAGE._serialized_start=710 + _LOTSNESTEDMESSAGE._serialized_end=2667 + _LOTSNESTEDMESSAGE_B0._serialized_start=731 + _LOTSNESTEDMESSAGE_B0._serialized_end=735 + _LOTSNESTEDMESSAGE_B1._serialized_start=737 + 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 + _LOTSNESTEDMESSAGE_B2._serialized_start=743 + _LOTSNESTEDMESSAGE_B2._serialized_end=747 + _LOTSNESTEDMESSAGE_B3._serialized_start=749 + _LOTSNESTEDMESSAGE_B3._serialized_end=753 + _LOTSNESTEDMESSAGE_B4._serialized_start=755 + _LOTSNESTEDMESSAGE_B4._serialized_end=759 + _LOTSNESTEDMESSAGE_B5._serialized_start=761 + _LOTSNESTEDMESSAGE_B5._serialized_end=765 + _LOTSNESTEDMESSAGE_B6._serialized_start=767 + _LOTSNESTEDMESSAGE_B6._serialized_end=771 + _LOTSNESTEDMESSAGE_B7._serialized_start=773 + _LOTSNESTEDMESSAGE_B7._serialized_end=777 + _LOTSNESTEDMESSAGE_B8._serialized_start=779 + _LOTSNESTEDMESSAGE_B8._serialized_end=783 + _LOTSNESTEDMESSAGE_B9._serialized_start=785 + _LOTSNESTEDMESSAGE_B9._serialized_end=789 + _LOTSNESTEDMESSAGE_B10._serialized_start=791 + _LOTSNESTEDMESSAGE_B10._serialized_end=796 + _LOTSNESTEDMESSAGE_B11._serialized_start=798 + _LOTSNESTEDMESSAGE_B11._serialized_end=803 + _LOTSNESTEDMESSAGE_B12._serialized_start=805 + _LOTSNESTEDMESSAGE_B12._serialized_end=810 + _LOTSNESTEDMESSAGE_B13._serialized_start=812 + _LOTSNESTEDMESSAGE_B13._serialized_end=817 + _LOTSNESTEDMESSAGE_B14._serialized_start=819 + _LOTSNESTEDMESSAGE_B14._serialized_end=824 + _LOTSNESTEDMESSAGE_B15._serialized_start=826 + _LOTSNESTEDMESSAGE_B15._serialized_end=831 + _LOTSNESTEDMESSAGE_B16._serialized_start=833 + _LOTSNESTEDMESSAGE_B16._serialized_end=838 + _LOTSNESTEDMESSAGE_B17._serialized_start=840 + _LOTSNESTEDMESSAGE_B17._serialized_end=845 + _LOTSNESTEDMESSAGE_B18._serialized_start=847 + _LOTSNESTEDMESSAGE_B18._serialized_end=852 + _LOTSNESTEDMESSAGE_B19._serialized_start=854 + _LOTSNESTEDMESSAGE_B19._serialized_end=859 + _LOTSNESTEDMESSAGE_B20._serialized_start=861 + _LOTSNESTEDMESSAGE_B20._serialized_end=866 + _LOTSNESTEDMESSAGE_B21._serialized_start=868 + _LOTSNESTEDMESSAGE_B21._serialized_end=873 + _LOTSNESTEDMESSAGE_B22._serialized_start=875 + _LOTSNESTEDMESSAGE_B22._serialized_end=880 + _LOTSNESTEDMESSAGE_B23._serialized_start=882 + _LOTSNESTEDMESSAGE_B23._serialized_end=887 + _LOTSNESTEDMESSAGE_B24._serialized_start=889 + _LOTSNESTEDMESSAGE_B24._serialized_end=894 + _LOTSNESTEDMESSAGE_B25._serialized_start=896 + _LOTSNESTEDMESSAGE_B25._serialized_end=901 + _LOTSNESTEDMESSAGE_B26._serialized_start=903 + _LOTSNESTEDMESSAGE_B26._serialized_end=908 + _LOTSNESTEDMESSAGE_B27._serialized_start=910 + _LOTSNESTEDMESSAGE_B27._serialized_end=915 + _LOTSNESTEDMESSAGE_B28._serialized_start=917 + _LOTSNESTEDMESSAGE_B28._serialized_end=922 + _LOTSNESTEDMESSAGE_B29._serialized_start=924 + _LOTSNESTEDMESSAGE_B29._serialized_end=929 + _LOTSNESTEDMESSAGE_B30._serialized_start=931 + _LOTSNESTEDMESSAGE_B30._serialized_end=936 + _LOTSNESTEDMESSAGE_B31._serialized_start=938 + _LOTSNESTEDMESSAGE_B31._serialized_end=943 + _LOTSNESTEDMESSAGE_B32._serialized_start=945 + _LOTSNESTEDMESSAGE_B32._serialized_end=950 + _LOTSNESTEDMESSAGE_B33._serialized_start=952 + _LOTSNESTEDMESSAGE_B33._serialized_end=957 + _LOTSNESTEDMESSAGE_B34._serialized_start=959 + _LOTSNESTEDMESSAGE_B34._serialized_end=964 + _LOTSNESTEDMESSAGE_B35._serialized_start=966 + _LOTSNESTEDMESSAGE_B35._serialized_end=971 + _LOTSNESTEDMESSAGE_B36._serialized_start=973 + _LOTSNESTEDMESSAGE_B36._serialized_end=978 + _LOTSNESTEDMESSAGE_B37._serialized_start=980 + _LOTSNESTEDMESSAGE_B37._serialized_end=985 + _LOTSNESTEDMESSAGE_B38._serialized_start=987 + _LOTSNESTEDMESSAGE_B38._serialized_end=992 + _LOTSNESTEDMESSAGE_B39._serialized_start=994 + _LOTSNESTEDMESSAGE_B39._serialized_end=999 + 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 + _LOTSNESTEDMESSAGE_B40._serialized_end=1006 + _LOTSNESTEDMESSAGE_B41._serialized_start=1008 + _LOTSNESTEDMESSAGE_B41._serialized_end=1013 + _LOTSNESTEDMESSAGE_B42._serialized_start=1015 + _LOTSNESTEDMESSAGE_B42._serialized_end=1020 + _LOTSNESTEDMESSAGE_B43._serialized_start=1022 + _LOTSNESTEDMESSAGE_B43._serialized_end=1027 + _LOTSNESTEDMESSAGE_B44._serialized_start=1029 + _LOTSNESTEDMESSAGE_B44._serialized_end=1034 + _LOTSNESTEDMESSAGE_B45._serialized_start=1036 + _LOTSNESTEDMESSAGE_B45._serialized_end=1041 + _LOTSNESTEDMESSAGE_B46._serialized_start=1043 + _LOTSNESTEDMESSAGE_B46._serialized_end=1048 + _LOTSNESTEDMESSAGE_B47._serialized_start=1050 + _LOTSNESTEDMESSAGE_B47._serialized_end=1055 + _LOTSNESTEDMESSAGE_B48._serialized_start=1057 + _LOTSNESTEDMESSAGE_B48._serialized_end=1062 + _LOTSNESTEDMESSAGE_B49._serialized_start=1064 + _LOTSNESTEDMESSAGE_B49._serialized_end=1069 + _LOTSNESTEDMESSAGE_B50._serialized_start=1071 + _LOTSNESTEDMESSAGE_B50._serialized_end=1076 + _LOTSNESTEDMESSAGE_B51._serialized_start=1078 + _LOTSNESTEDMESSAGE_B51._serialized_end=1083 + _LOTSNESTEDMESSAGE_B52._serialized_start=1085 + _LOTSNESTEDMESSAGE_B52._serialized_end=1090 + _LOTSNESTEDMESSAGE_B53._serialized_start=1092 + _LOTSNESTEDMESSAGE_B53._serialized_end=1097 + _LOTSNESTEDMESSAGE_B54._serialized_start=1099 + _LOTSNESTEDMESSAGE_B54._serialized_end=1104 + _LOTSNESTEDMESSAGE_B55._serialized_start=1106 + _LOTSNESTEDMESSAGE_B55._serialized_end=1111 + _LOTSNESTEDMESSAGE_B56._serialized_start=1113 + _LOTSNESTEDMESSAGE_B56._serialized_end=1118 + _LOTSNESTEDMESSAGE_B57._serialized_start=1120 + _LOTSNESTEDMESSAGE_B57._serialized_end=1125 + _LOTSNESTEDMESSAGE_B58._serialized_start=1127 + _LOTSNESTEDMESSAGE_B58._serialized_end=1132 + _LOTSNESTEDMESSAGE_B59._serialized_start=1134 + _LOTSNESTEDMESSAGE_B59._serialized_end=1139 + _LOTSNESTEDMESSAGE_B60._serialized_start=1141 + _LOTSNESTEDMESSAGE_B60._serialized_end=1146 + _LOTSNESTEDMESSAGE_B61._serialized_start=1148 + _LOTSNESTEDMESSAGE_B61._serialized_end=1153 + _LOTSNESTEDMESSAGE_B62._serialized_start=1155 + _LOTSNESTEDMESSAGE_B62._serialized_end=1160 + _LOTSNESTEDMESSAGE_B63._serialized_start=1162 + _LOTSNESTEDMESSAGE_B63._serialized_end=1167 + _LOTSNESTEDMESSAGE_B64._serialized_start=1169 + _LOTSNESTEDMESSAGE_B64._serialized_end=1174 + _LOTSNESTEDMESSAGE_B65._serialized_start=1176 + _LOTSNESTEDMESSAGE_B65._serialized_end=1181 + _LOTSNESTEDMESSAGE_B66._serialized_start=1183 + _LOTSNESTEDMESSAGE_B66._serialized_end=1188 + _LOTSNESTEDMESSAGE_B67._serialized_start=1190 + _LOTSNESTEDMESSAGE_B67._serialized_end=1195 + _LOTSNESTEDMESSAGE_B68._serialized_start=1197 + _LOTSNESTEDMESSAGE_B68._serialized_end=1202 + _LOTSNESTEDMESSAGE_B69._serialized_start=1204 + _LOTSNESTEDMESSAGE_B69._serialized_end=1209 + _LOTSNESTEDMESSAGE_B70._serialized_start=1211 + _LOTSNESTEDMESSAGE_B70._serialized_end=1216 + _LOTSNESTEDMESSAGE_B71._serialized_start=1218 + _LOTSNESTEDMESSAGE_B71._serialized_end=1223 + _LOTSNESTEDMESSAGE_B72._serialized_start=1225 + _LOTSNESTEDMESSAGE_B72._serialized_end=1230 + _LOTSNESTEDMESSAGE_B73._serialized_start=1232 + _LOTSNESTEDMESSAGE_B73._serialized_end=1237 + _LOTSNESTEDMESSAGE_B74._serialized_start=1239 + _LOTSNESTEDMESSAGE_B74._serialized_end=1244 + _LOTSNESTEDMESSAGE_B75._serialized_start=1246 + _LOTSNESTEDMESSAGE_B75._serialized_end=1251 + _LOTSNESTEDMESSAGE_B76._serialized_start=1253 + _LOTSNESTEDMESSAGE_B76._serialized_end=1258 + _LOTSNESTEDMESSAGE_B77._serialized_start=1260 + 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 + _LOTSNESTEDMESSAGE_B78._serialized_start=1267 + _LOTSNESTEDMESSAGE_B78._serialized_end=1272 + _LOTSNESTEDMESSAGE_B79._serialized_start=1274 + _LOTSNESTEDMESSAGE_B79._serialized_end=1279 + _LOTSNESTEDMESSAGE_B80._serialized_start=1281 + _LOTSNESTEDMESSAGE_B80._serialized_end=1286 + _LOTSNESTEDMESSAGE_B81._serialized_start=1288 + _LOTSNESTEDMESSAGE_B81._serialized_end=1293 + _LOTSNESTEDMESSAGE_B82._serialized_start=1295 + _LOTSNESTEDMESSAGE_B82._serialized_end=1300 + _LOTSNESTEDMESSAGE_B83._serialized_start=1302 + _LOTSNESTEDMESSAGE_B83._serialized_end=1307 + _LOTSNESTEDMESSAGE_B84._serialized_start=1309 + _LOTSNESTEDMESSAGE_B84._serialized_end=1314 + _LOTSNESTEDMESSAGE_B85._serialized_start=1316 + _LOTSNESTEDMESSAGE_B85._serialized_end=1321 + _LOTSNESTEDMESSAGE_B86._serialized_start=1323 + _LOTSNESTEDMESSAGE_B86._serialized_end=1328 + _LOTSNESTEDMESSAGE_B87._serialized_start=1330 + _LOTSNESTEDMESSAGE_B87._serialized_end=1335 + _LOTSNESTEDMESSAGE_B88._serialized_start=1337 + _LOTSNESTEDMESSAGE_B88._serialized_end=1342 + _LOTSNESTEDMESSAGE_B89._serialized_start=1344 + _LOTSNESTEDMESSAGE_B89._serialized_end=1349 + _LOTSNESTEDMESSAGE_B90._serialized_start=1351 + _LOTSNESTEDMESSAGE_B90._serialized_end=1356 + _LOTSNESTEDMESSAGE_B91._serialized_start=1358 + _LOTSNESTEDMESSAGE_B91._serialized_end=1363 + _LOTSNESTEDMESSAGE_B92._serialized_start=1365 + _LOTSNESTEDMESSAGE_B92._serialized_end=1370 + _LOTSNESTEDMESSAGE_B93._serialized_start=1372 + _LOTSNESTEDMESSAGE_B93._serialized_end=1377 + _LOTSNESTEDMESSAGE_B94._serialized_start=1379 + _LOTSNESTEDMESSAGE_B94._serialized_end=1384 + _LOTSNESTEDMESSAGE_B95._serialized_start=1386 + _LOTSNESTEDMESSAGE_B95._serialized_end=1391 + _LOTSNESTEDMESSAGE_B96._serialized_start=1393 + _LOTSNESTEDMESSAGE_B96._serialized_end=1398 + _LOTSNESTEDMESSAGE_B97._serialized_start=1400 + _LOTSNESTEDMESSAGE_B97._serialized_end=1405 + _LOTSNESTEDMESSAGE_B98._serialized_start=1407 + _LOTSNESTEDMESSAGE_B98._serialized_end=1412 + _LOTSNESTEDMESSAGE_B99._serialized_start=1414 + _LOTSNESTEDMESSAGE_B99._serialized_end=1419 + _LOTSNESTEDMESSAGE_B100._serialized_start=1421 + _LOTSNESTEDMESSAGE_B100._serialized_end=1427 + _LOTSNESTEDMESSAGE_B101._serialized_start=1429 + _LOTSNESTEDMESSAGE_B101._serialized_end=1435 + _LOTSNESTEDMESSAGE_B102._serialized_start=1437 + _LOTSNESTEDMESSAGE_B102._serialized_end=1443 + _LOTSNESTEDMESSAGE_B103._serialized_start=1445 + _LOTSNESTEDMESSAGE_B103._serialized_end=1451 + _LOTSNESTEDMESSAGE_B104._serialized_start=1453 + _LOTSNESTEDMESSAGE_B104._serialized_end=1459 + _LOTSNESTEDMESSAGE_B105._serialized_start=1461 + _LOTSNESTEDMESSAGE_B105._serialized_end=1467 + _LOTSNESTEDMESSAGE_B106._serialized_start=1469 + _LOTSNESTEDMESSAGE_B106._serialized_end=1475 + _LOTSNESTEDMESSAGE_B107._serialized_start=1477 + _LOTSNESTEDMESSAGE_B107._serialized_end=1483 + _LOTSNESTEDMESSAGE_B108._serialized_start=1485 + _LOTSNESTEDMESSAGE_B108._serialized_end=1491 + _LOTSNESTEDMESSAGE_B109._serialized_start=1493 + _LOTSNESTEDMESSAGE_B109._serialized_end=1499 + _LOTSNESTEDMESSAGE_B110._serialized_start=1501 + _LOTSNESTEDMESSAGE_B110._serialized_end=1507 + _LOTSNESTEDMESSAGE_B111._serialized_start=1509 + _LOTSNESTEDMESSAGE_B111._serialized_end=1515 + _LOTSNESTEDMESSAGE_B112._serialized_start=1517 + _LOTSNESTEDMESSAGE_B112._serialized_end=1523 + _LOTSNESTEDMESSAGE_B113._serialized_start=1525 + _LOTSNESTEDMESSAGE_B113._serialized_end=1531 + _LOTSNESTEDMESSAGE_B114._serialized_start=1533 + _LOTSNESTEDMESSAGE_B114._serialized_end=1539 + 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 + _LOTSNESTEDMESSAGE_B115._serialized_end=1547 + _LOTSNESTEDMESSAGE_B116._serialized_start=1549 + _LOTSNESTEDMESSAGE_B116._serialized_end=1555 + _LOTSNESTEDMESSAGE_B117._serialized_start=1557 + _LOTSNESTEDMESSAGE_B117._serialized_end=1563 + _LOTSNESTEDMESSAGE_B118._serialized_start=1565 + _LOTSNESTEDMESSAGE_B118._serialized_end=1571 + _LOTSNESTEDMESSAGE_B119._serialized_start=1573 + _LOTSNESTEDMESSAGE_B119._serialized_end=1579 + _LOTSNESTEDMESSAGE_B120._serialized_start=1581 + _LOTSNESTEDMESSAGE_B120._serialized_end=1587 + _LOTSNESTEDMESSAGE_B121._serialized_start=1589 + _LOTSNESTEDMESSAGE_B121._serialized_end=1595 + _LOTSNESTEDMESSAGE_B122._serialized_start=1597 + _LOTSNESTEDMESSAGE_B122._serialized_end=1603 + _LOTSNESTEDMESSAGE_B123._serialized_start=1605 + _LOTSNESTEDMESSAGE_B123._serialized_end=1611 + _LOTSNESTEDMESSAGE_B124._serialized_start=1613 + _LOTSNESTEDMESSAGE_B124._serialized_end=1619 + _LOTSNESTEDMESSAGE_B125._serialized_start=1621 + _LOTSNESTEDMESSAGE_B125._serialized_end=1627 + _LOTSNESTEDMESSAGE_B126._serialized_start=1629 + _LOTSNESTEDMESSAGE_B126._serialized_end=1635 + _LOTSNESTEDMESSAGE_B127._serialized_start=1637 + _LOTSNESTEDMESSAGE_B127._serialized_end=1643 + _LOTSNESTEDMESSAGE_B128._serialized_start=1645 + _LOTSNESTEDMESSAGE_B128._serialized_end=1651 + _LOTSNESTEDMESSAGE_B129._serialized_start=1653 + _LOTSNESTEDMESSAGE_B129._serialized_end=1659 + _LOTSNESTEDMESSAGE_B130._serialized_start=1661 + _LOTSNESTEDMESSAGE_B130._serialized_end=1667 + _LOTSNESTEDMESSAGE_B131._serialized_start=1669 + _LOTSNESTEDMESSAGE_B131._serialized_end=1675 + _LOTSNESTEDMESSAGE_B132._serialized_start=1677 + _LOTSNESTEDMESSAGE_B132._serialized_end=1683 + _LOTSNESTEDMESSAGE_B133._serialized_start=1685 + _LOTSNESTEDMESSAGE_B133._serialized_end=1691 + _LOTSNESTEDMESSAGE_B134._serialized_start=1693 + _LOTSNESTEDMESSAGE_B134._serialized_end=1699 + _LOTSNESTEDMESSAGE_B135._serialized_start=1701 + _LOTSNESTEDMESSAGE_B135._serialized_end=1707 + _LOTSNESTEDMESSAGE_B136._serialized_start=1709 + _LOTSNESTEDMESSAGE_B136._serialized_end=1715 + _LOTSNESTEDMESSAGE_B137._serialized_start=1717 + _LOTSNESTEDMESSAGE_B137._serialized_end=1723 + _LOTSNESTEDMESSAGE_B138._serialized_start=1725 + _LOTSNESTEDMESSAGE_B138._serialized_end=1731 + _LOTSNESTEDMESSAGE_B139._serialized_start=1733 + _LOTSNESTEDMESSAGE_B139._serialized_end=1739 + _LOTSNESTEDMESSAGE_B140._serialized_start=1741 + _LOTSNESTEDMESSAGE_B140._serialized_end=1747 + _LOTSNESTEDMESSAGE_B141._serialized_start=1749 + _LOTSNESTEDMESSAGE_B141._serialized_end=1755 + _LOTSNESTEDMESSAGE_B142._serialized_start=1757 + _LOTSNESTEDMESSAGE_B142._serialized_end=1763 + _LOTSNESTEDMESSAGE_B143._serialized_start=1765 + _LOTSNESTEDMESSAGE_B143._serialized_end=1771 + _LOTSNESTEDMESSAGE_B144._serialized_start=1773 + _LOTSNESTEDMESSAGE_B144._serialized_end=1779 + _LOTSNESTEDMESSAGE_B145._serialized_start=1781 + _LOTSNESTEDMESSAGE_B145._serialized_end=1787 + _LOTSNESTEDMESSAGE_B146._serialized_start=1789 + _LOTSNESTEDMESSAGE_B146._serialized_end=1795 + _LOTSNESTEDMESSAGE_B147._serialized_start=1797 + _LOTSNESTEDMESSAGE_B147._serialized_end=1803 + _LOTSNESTEDMESSAGE_B148._serialized_start=1805 + _LOTSNESTEDMESSAGE_B148._serialized_end=1811 + _LOTSNESTEDMESSAGE_B149._serialized_start=1813 + _LOTSNESTEDMESSAGE_B149._serialized_end=1819 + _LOTSNESTEDMESSAGE_B150._serialized_start=1821 + _LOTSNESTEDMESSAGE_B150._serialized_end=1827 + _LOTSNESTEDMESSAGE_B151._serialized_start=1829 + _LOTSNESTEDMESSAGE_B151._serialized_end=1835 + 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 + _LOTSNESTEDMESSAGE_B152._serialized_end=1843 + _LOTSNESTEDMESSAGE_B153._serialized_start=1845 + _LOTSNESTEDMESSAGE_B153._serialized_end=1851 + _LOTSNESTEDMESSAGE_B154._serialized_start=1853 + _LOTSNESTEDMESSAGE_B154._serialized_end=1859 + _LOTSNESTEDMESSAGE_B155._serialized_start=1861 + _LOTSNESTEDMESSAGE_B155._serialized_end=1867 + _LOTSNESTEDMESSAGE_B156._serialized_start=1869 + _LOTSNESTEDMESSAGE_B156._serialized_end=1875 + _LOTSNESTEDMESSAGE_B157._serialized_start=1877 + _LOTSNESTEDMESSAGE_B157._serialized_end=1883 + _LOTSNESTEDMESSAGE_B158._serialized_start=1885 + _LOTSNESTEDMESSAGE_B158._serialized_end=1891 + _LOTSNESTEDMESSAGE_B159._serialized_start=1893 + _LOTSNESTEDMESSAGE_B159._serialized_end=1899 + _LOTSNESTEDMESSAGE_B160._serialized_start=1901 + _LOTSNESTEDMESSAGE_B160._serialized_end=1907 + _LOTSNESTEDMESSAGE_B161._serialized_start=1909 + _LOTSNESTEDMESSAGE_B161._serialized_end=1915 + _LOTSNESTEDMESSAGE_B162._serialized_start=1917 + _LOTSNESTEDMESSAGE_B162._serialized_end=1923 + _LOTSNESTEDMESSAGE_B163._serialized_start=1925 + _LOTSNESTEDMESSAGE_B163._serialized_end=1931 + _LOTSNESTEDMESSAGE_B164._serialized_start=1933 + _LOTSNESTEDMESSAGE_B164._serialized_end=1939 + _LOTSNESTEDMESSAGE_B165._serialized_start=1941 + _LOTSNESTEDMESSAGE_B165._serialized_end=1947 + _LOTSNESTEDMESSAGE_B166._serialized_start=1949 + _LOTSNESTEDMESSAGE_B166._serialized_end=1955 + _LOTSNESTEDMESSAGE_B167._serialized_start=1957 + _LOTSNESTEDMESSAGE_B167._serialized_end=1963 + _LOTSNESTEDMESSAGE_B168._serialized_start=1965 + _LOTSNESTEDMESSAGE_B168._serialized_end=1971 + _LOTSNESTEDMESSAGE_B169._serialized_start=1973 + _LOTSNESTEDMESSAGE_B169._serialized_end=1979 + _LOTSNESTEDMESSAGE_B170._serialized_start=1981 + _LOTSNESTEDMESSAGE_B170._serialized_end=1987 + _LOTSNESTEDMESSAGE_B171._serialized_start=1989 + _LOTSNESTEDMESSAGE_B171._serialized_end=1995 + _LOTSNESTEDMESSAGE_B172._serialized_start=1997 + _LOTSNESTEDMESSAGE_B172._serialized_end=2003 + _LOTSNESTEDMESSAGE_B173._serialized_start=2005 + _LOTSNESTEDMESSAGE_B173._serialized_end=2011 + _LOTSNESTEDMESSAGE_B174._serialized_start=2013 + _LOTSNESTEDMESSAGE_B174._serialized_end=2019 + _LOTSNESTEDMESSAGE_B175._serialized_start=2021 + _LOTSNESTEDMESSAGE_B175._serialized_end=2027 + _LOTSNESTEDMESSAGE_B176._serialized_start=2029 + _LOTSNESTEDMESSAGE_B176._serialized_end=2035 + _LOTSNESTEDMESSAGE_B177._serialized_start=2037 + _LOTSNESTEDMESSAGE_B177._serialized_end=2043 + _LOTSNESTEDMESSAGE_B178._serialized_start=2045 + _LOTSNESTEDMESSAGE_B178._serialized_end=2051 + _LOTSNESTEDMESSAGE_B179._serialized_start=2053 + _LOTSNESTEDMESSAGE_B179._serialized_end=2059 + _LOTSNESTEDMESSAGE_B180._serialized_start=2061 + _LOTSNESTEDMESSAGE_B180._serialized_end=2067 + _LOTSNESTEDMESSAGE_B181._serialized_start=2069 + _LOTSNESTEDMESSAGE_B181._serialized_end=2075 + _LOTSNESTEDMESSAGE_B182._serialized_start=2077 + _LOTSNESTEDMESSAGE_B182._serialized_end=2083 + _LOTSNESTEDMESSAGE_B183._serialized_start=2085 + _LOTSNESTEDMESSAGE_B183._serialized_end=2091 + _LOTSNESTEDMESSAGE_B184._serialized_start=2093 + _LOTSNESTEDMESSAGE_B184._serialized_end=2099 + _LOTSNESTEDMESSAGE_B185._serialized_start=2101 + _LOTSNESTEDMESSAGE_B185._serialized_end=2107 + _LOTSNESTEDMESSAGE_B186._serialized_start=2109 + _LOTSNESTEDMESSAGE_B186._serialized_end=2115 + _LOTSNESTEDMESSAGE_B187._serialized_start=2117 + _LOTSNESTEDMESSAGE_B187._serialized_end=2123 + _LOTSNESTEDMESSAGE_B188._serialized_start=2125 + _LOTSNESTEDMESSAGE_B188._serialized_end=2131 + 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 + _LOTSNESTEDMESSAGE_B189._serialized_end=2139 + _LOTSNESTEDMESSAGE_B190._serialized_start=2141 + _LOTSNESTEDMESSAGE_B190._serialized_end=2147 + _LOTSNESTEDMESSAGE_B191._serialized_start=2149 + _LOTSNESTEDMESSAGE_B191._serialized_end=2155 + _LOTSNESTEDMESSAGE_B192._serialized_start=2157 + _LOTSNESTEDMESSAGE_B192._serialized_end=2163 + _LOTSNESTEDMESSAGE_B193._serialized_start=2165 + _LOTSNESTEDMESSAGE_B193._serialized_end=2171 + _LOTSNESTEDMESSAGE_B194._serialized_start=2173 + _LOTSNESTEDMESSAGE_B194._serialized_end=2179 + _LOTSNESTEDMESSAGE_B195._serialized_start=2181 + _LOTSNESTEDMESSAGE_B195._serialized_end=2187 + _LOTSNESTEDMESSAGE_B196._serialized_start=2189 + _LOTSNESTEDMESSAGE_B196._serialized_end=2195 + _LOTSNESTEDMESSAGE_B197._serialized_start=2197 + _LOTSNESTEDMESSAGE_B197._serialized_end=2203 + _LOTSNESTEDMESSAGE_B198._serialized_start=2205 + _LOTSNESTEDMESSAGE_B198._serialized_end=2211 + _LOTSNESTEDMESSAGE_B199._serialized_start=2213 + _LOTSNESTEDMESSAGE_B199._serialized_end=2219 + _LOTSNESTEDMESSAGE_B200._serialized_start=2221 + _LOTSNESTEDMESSAGE_B200._serialized_end=2227 + _LOTSNESTEDMESSAGE_B201._serialized_start=2229 + _LOTSNESTEDMESSAGE_B201._serialized_end=2235 + _LOTSNESTEDMESSAGE_B202._serialized_start=2237 + _LOTSNESTEDMESSAGE_B202._serialized_end=2243 + _LOTSNESTEDMESSAGE_B203._serialized_start=2245 + _LOTSNESTEDMESSAGE_B203._serialized_end=2251 + _LOTSNESTEDMESSAGE_B204._serialized_start=2253 + _LOTSNESTEDMESSAGE_B204._serialized_end=2259 + _LOTSNESTEDMESSAGE_B205._serialized_start=2261 + _LOTSNESTEDMESSAGE_B205._serialized_end=2267 + _LOTSNESTEDMESSAGE_B206._serialized_start=2269 + _LOTSNESTEDMESSAGE_B206._serialized_end=2275 + _LOTSNESTEDMESSAGE_B207._serialized_start=2277 + _LOTSNESTEDMESSAGE_B207._serialized_end=2283 + _LOTSNESTEDMESSAGE_B208._serialized_start=2285 + _LOTSNESTEDMESSAGE_B208._serialized_end=2291 + _LOTSNESTEDMESSAGE_B209._serialized_start=2293 + _LOTSNESTEDMESSAGE_B209._serialized_end=2299 + _LOTSNESTEDMESSAGE_B210._serialized_start=2301 + _LOTSNESTEDMESSAGE_B210._serialized_end=2307 + _LOTSNESTEDMESSAGE_B211._serialized_start=2309 + _LOTSNESTEDMESSAGE_B211._serialized_end=2315 + _LOTSNESTEDMESSAGE_B212._serialized_start=2317 + _LOTSNESTEDMESSAGE_B212._serialized_end=2323 + _LOTSNESTEDMESSAGE_B213._serialized_start=2325 + _LOTSNESTEDMESSAGE_B213._serialized_end=2331 + _LOTSNESTEDMESSAGE_B214._serialized_start=2333 + _LOTSNESTEDMESSAGE_B214._serialized_end=2339 + _LOTSNESTEDMESSAGE_B215._serialized_start=2341 + _LOTSNESTEDMESSAGE_B215._serialized_end=2347 + _LOTSNESTEDMESSAGE_B216._serialized_start=2349 + _LOTSNESTEDMESSAGE_B216._serialized_end=2355 + _LOTSNESTEDMESSAGE_B217._serialized_start=2357 + _LOTSNESTEDMESSAGE_B217._serialized_end=2363 + _LOTSNESTEDMESSAGE_B218._serialized_start=2365 + _LOTSNESTEDMESSAGE_B218._serialized_end=2371 + _LOTSNESTEDMESSAGE_B219._serialized_start=2373 + _LOTSNESTEDMESSAGE_B219._serialized_end=2379 + _LOTSNESTEDMESSAGE_B220._serialized_start=2381 + _LOTSNESTEDMESSAGE_B220._serialized_end=2387 + _LOTSNESTEDMESSAGE_B221._serialized_start=2389 + _LOTSNESTEDMESSAGE_B221._serialized_end=2395 + _LOTSNESTEDMESSAGE_B222._serialized_start=2397 + _LOTSNESTEDMESSAGE_B222._serialized_end=2403 + _LOTSNESTEDMESSAGE_B223._serialized_start=2405 + _LOTSNESTEDMESSAGE_B223._serialized_end=2411 + _LOTSNESTEDMESSAGE_B224._serialized_start=2413 + _LOTSNESTEDMESSAGE_B224._serialized_end=2419 + _LOTSNESTEDMESSAGE_B225._serialized_start=2421 + _LOTSNESTEDMESSAGE_B225._serialized_end=2427 + 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 + _LOTSNESTEDMESSAGE_B226._serialized_end=2435 + _LOTSNESTEDMESSAGE_B227._serialized_start=2437 + _LOTSNESTEDMESSAGE_B227._serialized_end=2443 + _LOTSNESTEDMESSAGE_B228._serialized_start=2445 + _LOTSNESTEDMESSAGE_B228._serialized_end=2451 + _LOTSNESTEDMESSAGE_B229._serialized_start=2453 + _LOTSNESTEDMESSAGE_B229._serialized_end=2459 + _LOTSNESTEDMESSAGE_B230._serialized_start=2461 + _LOTSNESTEDMESSAGE_B230._serialized_end=2467 + _LOTSNESTEDMESSAGE_B231._serialized_start=2469 + _LOTSNESTEDMESSAGE_B231._serialized_end=2475 + _LOTSNESTEDMESSAGE_B232._serialized_start=2477 + _LOTSNESTEDMESSAGE_B232._serialized_end=2483 + _LOTSNESTEDMESSAGE_B233._serialized_start=2485 + _LOTSNESTEDMESSAGE_B233._serialized_end=2491 + _LOTSNESTEDMESSAGE_B234._serialized_start=2493 + _LOTSNESTEDMESSAGE_B234._serialized_end=2499 + _LOTSNESTEDMESSAGE_B235._serialized_start=2501 + _LOTSNESTEDMESSAGE_B235._serialized_end=2507 + _LOTSNESTEDMESSAGE_B236._serialized_start=2509 + _LOTSNESTEDMESSAGE_B236._serialized_end=2515 + _LOTSNESTEDMESSAGE_B237._serialized_start=2517 + _LOTSNESTEDMESSAGE_B237._serialized_end=2523 + _LOTSNESTEDMESSAGE_B238._serialized_start=2525 + _LOTSNESTEDMESSAGE_B238._serialized_end=2531 + _LOTSNESTEDMESSAGE_B239._serialized_start=2533 + _LOTSNESTEDMESSAGE_B239._serialized_end=2539 + _LOTSNESTEDMESSAGE_B240._serialized_start=2541 + _LOTSNESTEDMESSAGE_B240._serialized_end=2547 + _LOTSNESTEDMESSAGE_B241._serialized_start=2549 + _LOTSNESTEDMESSAGE_B241._serialized_end=2555 + _LOTSNESTEDMESSAGE_B242._serialized_start=2557 + _LOTSNESTEDMESSAGE_B242._serialized_end=2563 + _LOTSNESTEDMESSAGE_B243._serialized_start=2565 + _LOTSNESTEDMESSAGE_B243._serialized_end=2571 + _LOTSNESTEDMESSAGE_B244._serialized_start=2573 + _LOTSNESTEDMESSAGE_B244._serialized_end=2579 + _LOTSNESTEDMESSAGE_B245._serialized_start=2581 + _LOTSNESTEDMESSAGE_B245._serialized_end=2587 + _LOTSNESTEDMESSAGE_B246._serialized_start=2589 + _LOTSNESTEDMESSAGE_B246._serialized_end=2595 + _LOTSNESTEDMESSAGE_B247._serialized_start=2597 + _LOTSNESTEDMESSAGE_B247._serialized_end=2603 + _LOTSNESTEDMESSAGE_B248._serialized_start=2605 + _LOTSNESTEDMESSAGE_B248._serialized_end=2611 + _LOTSNESTEDMESSAGE_B249._serialized_start=2613 + _LOTSNESTEDMESSAGE_B249._serialized_end=2619 + _LOTSNESTEDMESSAGE_B250._serialized_start=2621 + _LOTSNESTEDMESSAGE_B250._serialized_end=2627 + _LOTSNESTEDMESSAGE_B251._serialized_start=2629 + _LOTSNESTEDMESSAGE_B251._serialized_end=2635 + _LOTSNESTEDMESSAGE_B252._serialized_start=2637 + _LOTSNESTEDMESSAGE_B252._serialized_end=2643 + _LOTSNESTEDMESSAGE_B253._serialized_start=2645 + _LOTSNESTEDMESSAGE_B253._serialized_end=2651 + _LOTSNESTEDMESSAGE_B254._serialized_start=2653 + _LOTSNESTEDMESSAGE_B254._serialized_end=2659 + _LOTSNESTEDMESSAGE_B255._serialized_start=2661 + _LOTSNESTEDMESSAGE_B255._serialized_end=2667 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py new file mode 100644 index 0000000000..d46dee080a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/no_package.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _NOPACKAGEENUM._serialized_start=106 + _NOPACKAGEENUM._serialized_end=169 + _NOPACKAGEMESSAGE._serialized_start=45 + _NOPACKAGEMESSAGE._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py new file mode 100644 index 0000000000..2921d5cb6e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py @@ -0,0 +1,1539 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. +# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
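+
+As a plain-Python illustration (an editor's aside, not upstream wording),
+the built-in three-argument form of type() builds a class at runtime the
+same way this metaclass does -- only without a Descriptor driving it:
+
+  # name, bases, class dict -> new class object
+  Point = type('Point', (object,), {'x': 0, 'y': 0})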
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers are *here in
+this file*.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+from io import BytesIO
+import struct
+import sys
+import weakref
+
+# We use "as" to avoid name collisions with variables.
+from google.protobuf.internal import api_implementation
+from google.protobuf.internal import containers
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf.internal import extension_dict
+from google.protobuf.internal import message_listener as message_listener_mod
+from google.protobuf.internal import type_checkers
+from google.protobuf.internal import well_known_types
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor as descriptor_mod
+from google.protobuf import message as message_mod
+from google.protobuf import text_format
+
+_FieldDescriptor = descriptor_mod.FieldDescriptor
+_AnyFullTypeName = 'google.protobuf.Any'
+_ExtensionDict = extension_dict._ExtensionDict
+
+class GeneratedProtocolMessageType(type):
+
+  """Metaclass for protocol message classes created at runtime from Descriptors.
+
+  We add implementations for all methods described in the Message class. We
+  also create properties to allow getting/setting all fields in the protocol
+  message. Finally, we create slots to prevent users from accidentally
+  "setting" nonexistent fields in the protocol message, which then wouldn't get
+  serialized / deserialized properly.
+
+  The protocol compiler currently uses this metaclass to create protocol
+  message classes at runtime. Clients can also manually create their own
+  classes at runtime, as in this example:
+
+  mydescriptor = Descriptor(.....)
+  factory = symbol_database.Default()
+  factory.pool.AddDescriptor(mydescriptor)
+  MyProtoClass = factory.GetPrototype(mydescriptor)
+  myproto_instance = MyProtoClass()
+  myproto_instance.foo_field = 23
+  ...
+  """
+
+  # Must be consistent with the protocol-compiler code in
+  # proto2/compiler/internal/generator.*.
+  _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+  def __new__(cls, name, bases, dictionary):
+    """Custom allocation for runtime-generated class types.
+
+    We override __new__ because this is apparently the only place
+    where we can meaningfully set __slots__ on the class we're creating(?).
+    (The interplay between metaclasses and slots is not very well-documented.)
+
+    Args:
+      name: Name of the class (ignored, but required by the
+        metaclass protocol).
+      bases: Base classes of the class we're constructing.
+        (Should be message.Message.) We ignore this field, but
+        it's required by the metaclass protocol.
+      dictionary: The class dictionary of the class we're
+        constructing. dictionary[_DESCRIPTOR_KEY] must contain
+        a Descriptor object describing this protocol message
+        type.
+
+    Returns:
+      Newly-allocated class.
+
+    Raises:
+      RuntimeError: Generated code only works with the Python C++ extension.
+    """
+    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+
+    if isinstance(descriptor, str):
+      raise RuntimeError('The generated code only works with the Python C++ '
+                         'extension, but it is using the pure-Python runtime.')
+
+    # If a concrete class already exists for this descriptor, don't try to
+    # create another. Doing so will break any messages that already exist with
+    # the existing class.
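+    #
+    # A quick sketch of the reuse this buys (an editor's illustration, not
+    # upstream text; `desc` stands for any message Descriptor, and the call
+    # below is the standard symbol_database API):
+    #
+    #   from google.protobuf import symbol_database
+    #   cls_a = symbol_database.Default().GetPrototype(desc)
+    #   cls_b = symbol_database.Default().GetPrototype(desc)
+    #   assert cls_a is cls_b   # one concrete class per descriptor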
+    #
+    # The C++ implementation appears to have its own internal `PyMessageFactory`
+    # to achieve similar results.
+    #
+    # This most commonly happens in `text_format.py` when using descriptors from
+    # a custom pool; it calls symbol_database.Global().GetPrototype() on a
+    # descriptor which already has an existing concrete class.
+    new_class = getattr(descriptor, '_concrete_class', None)
+    if new_class:
+      return new_class
+
+    if descriptor.full_name in well_known_types.WKTBASES:
+      bases += (well_known_types.WKTBASES[descriptor.full_name],)
+    _AddClassAttributesForNestedExtensions(descriptor, dictionary)
+    _AddSlots(descriptor, dictionary)
+
+    superclass = super(GeneratedProtocolMessageType, cls)
+    new_class = superclass.__new__(cls, name, bases, dictionary)
+    return new_class
+
+  def __init__(cls, name, bases, dictionary):
+    """Here we perform the majority of our work on the class.
+    We add enum getters, an __init__ method, implementations
+    of all Message methods, and properties for all fields
+    in the protocol type.
+
+    Args:
+      name: Name of the class (ignored, but required by the
+        metaclass protocol).
+      bases: Base classes of the class we're constructing.
+        (Should be message.Message.) We ignore this field, but
+        it's required by the metaclass protocol.
+      dictionary: The class dictionary of the class we're
+        constructing. dictionary[_DESCRIPTOR_KEY] must contain
+        a Descriptor object describing this protocol message
+        type.
+    """
+    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+
+    # If this is an _existing_ class looked up via `_concrete_class` in the
+    # __new__ method above, then we don't need to re-initialize anything.
+    existing_class = getattr(descriptor, '_concrete_class', None)
+    if existing_class:
+      assert existing_class is cls, (
+          'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
+          % (descriptor.full_name))
+      return
+
+    cls._decoders_by_tag = {}
+    if (descriptor.has_options and
+        descriptor.GetOptions().message_set_wire_format):
+      cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
+          decoder.MessageSetItemDecoder(descriptor), None)
+
+    # Attach stuff to each FieldDescriptor for quick lookup later on.
+    for field in descriptor.fields:
+      _AttachFieldHelpers(cls, field)
+
+    descriptor._concrete_class = cls  # pylint: disable=protected-access
+    _AddEnumValues(descriptor, cls)
+    _AddInitMethod(descriptor, cls)
+    _AddPropertiesForFields(descriptor, cls)
+    _AddPropertiesForExtensions(descriptor, cls)
+    _AddStaticMethods(cls)
+    _AddMessageMethods(descriptor, cls)
+    _AddPrivateHelperMethods(descriptor, cls)
+
+    superclass = super(GeneratedProtocolMessageType, cls)
+    superclass.__init__(name, bases, dictionary)
+
+
+# Stateless helpers for GeneratedProtocolMessageType below.
+# Outside clients should not access these directly.
+#
+# I opted not to make any of these methods on the metaclass, to make it more
+# clear that I'm not really using any state there and to keep clients from
+# thinking that they have direct access to these construction helpers.
+
+
+def _PropertyName(proto_field_name):
+  """Returns the name of the public property attribute which
+  clients can use to get and (in some cases) set the value
+  of a protocol message field.
+
+  Args:
+    proto_field_name: The protocol message field name, exactly
+      as it appears (or would appear) in a .proto file.
+  """
+  # TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
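+  # (Editor's illustration of the status quo this TODO describes, assuming a
+  # message type with a field literally named "yield"; such fields are still
+  # fully usable through reflection:
+  #
+  #   setattr(msg, 'yield', 5)
+  #   assert getattr(msg, 'yield') == 5
+  #
+  # This is the same pattern the generated more_messages_pb2 module above
+  # relies on with globals()['class'] and getattr(..., 'try').)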
+ # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. + # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. + """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_unknown_field_set', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name['value'] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + is_proto3 = field_descriptor.containing_type.syntax == 'proto3' + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == 'proto2': + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField('packed') and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor, + _IsMessageMapField(field_descriptor)) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = 
field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + clear_if_default = False + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + elif (is_proto3 and not is_repeated and + field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): + clear_if_default = True + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + elif decode_type == _FieldDescriptor.TYPE_STRING: + field_decoder = decoder.StringDecoder( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + # pylint: disable=protected-access + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. + cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker, + field.message_type) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker, + field.message_type) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. 
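+
+  (Editor's note: the constructor built here is what the field properties call
+  lazily -- e.g. the repeated-field getter below runs
+  field._default_constructor(self) the first time the attribute is touched, so
+  an untouched repeated field costs nothing.)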
+ + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). + message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + assert getattr(message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (field.full_name, message_type.full_name)) + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + raise exc.with_traceback(sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. 
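+    # (Editor's gloss on the lazy-allocation pattern assumed here, sketched:
+    #
+    #   fields = ()                    # shared empty tuple; free until needed
+    #   ...
+    #   if not isinstance(fields, list):
+    #     fields = list(fields)        # promote to a list on first write
+    #
+    # _unknown_field_set just below plays the same trick with None.)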
+ self._unknown_fields = () + # _unknown_field_set is None when empty for efficiency, and will be + # turned into UnknownFieldSet struct if fields are added. + self._unknown_field_set = None # pylint: disable=protected-access + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. + continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
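+  # (The assert fails if a new CPPTYPE is ever added, forcing the dispatch
+  # below to be revisited.)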
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + is_proto3 = field.containing_type.syntax == 'proto3' + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. + if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO(amauryfa): Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
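+    # Until then, mirror the pool's extension indices directly onto the
+    # class for backward compatibility.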
+ pool = descriptor.file.pool + cls._extensions_by_number = pool._extensions_by_number[descriptor] + cls._extensions_by_name = pool._extensions_by_name[descriptor] + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. + # pylint: disable=protected-access + cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) + _AttachFieldHelpers(cls, extension_handle) + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_PROTO3_ERROR_TEMPLATE = \ + ('Protocol message %s has no non-repeated submessage field "%s" ' + 'nor marked as optional') +_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % (message_descriptor.full_name, field_name)) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. 
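+      # (hasattr() is used because only some containers implement
+      # InvalidateIterators.)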
+      if hasattr(self._fields[field], 'InvalidateIterators'):
+        self._fields[field].InvalidateIterators()
+
+      # Note: If the field is a sub-message, its listener will still point
+      # at us. That's fine, because the worst that can happen is that it
+      # will call _Modified() and invalidate our byte size. Big deal.
+      del self._fields[field]
+
+      if self._oneofs.get(field.containing_oneof, None) is field:
+        del self._oneofs[field.containing_oneof]
+
+    # Always call _Modified() -- even if nothing was changed, this is
+    # a mutating method, and thus calling it should cause the field to become
+    # present in the parent message.
+    self._Modified()
+
+  cls.ClearField = ClearField
+
+
+def _AddClearExtensionMethod(cls):
+  """Helper for _AddMessageMethods()."""
+  def ClearExtension(self, extension_handle):
+    extension_dict._VerifyExtensionHandle(self, extension_handle)
+
+    # Similar to ClearField(), above.
+    if extension_handle in self._fields:
+      del self._fields[extension_handle]
+    self._Modified()
+  cls.ClearExtension = ClearExtension
+
+
+def _AddHasExtensionMethod(cls):
+  """Helper for _AddMessageMethods()."""
+  def HasExtension(self, extension_handle):
+    extension_dict._VerifyExtensionHandle(self, extension_handle)
+    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
+      raise KeyError('"%s" is repeated.' % extension_handle.full_name)
+
+    if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+      value = self._fields.get(extension_handle)
+      return value is not None and value._is_present_in_parent
+    else:
+      return extension_handle in self._fields
+  cls.HasExtension = HasExtension
+
+def _InternalUnpackAny(msg):
+  """Unpacks Any message and returns the unpacked message.
+
+  This internal method is different from the public Any Unpack method, which
+  takes the target message as an argument. _InternalUnpackAny does not have
+  the target message type and needs to find the message type in the
+  descriptor pool.
+
+  Args:
+    msg: An Any message to be unpacked.
+
+  Returns:
+    The unpacked message.
+  """
+  # TODO(amauryfa): Don't use the factory of generated messages.
+  # To make Any work with custom factories, use the message factory of the
+  # parent message.
+  # pylint: disable=g-import-not-at-top
+  from google.protobuf import symbol_database
+  factory = symbol_database.Default()
+
+  type_url = msg.type_url
+
+  if not type_url:
+    return None
+
+  # TODO(haberman): For now we just strip the hostname.  Better logic will be
+  # required.
+  type_name = type_url.split('/')[-1]
+  descriptor = factory.pool.FindMessageTypeByName(type_name)
+
+  if descriptor is None:
+    return None
+
+  message_class = factory.GetPrototype(descriptor)
+  message = message_class()
+
+  message.ParseFromString(msg.value)
+  return message
+
+
+def _AddEqualsMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  def __eq__(self, other):
+    if (not isinstance(other, message_mod.Message) or
+        other.DESCRIPTOR != self.DESCRIPTOR):
+      return False
+
+    if self is other:
+      return True
+
+    if self.DESCRIPTOR.full_name == _AnyFullTypeName:
+      any_a = _InternalUnpackAny(self)
+      any_b = _InternalUnpackAny(other)
+      if any_a and any_b:
+        return any_a == any_b
+
+    if not self.ListFields() == other.ListFields():
+      return False
+
+    # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions,
+    # then use it for the comparison.
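+    # Until then, compare the raw unknown fields after sorting, so that
+    # serialization order does not affect equality.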
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + size = descriptor.fields_by_name['key']._sizer(self.key) + size += descriptor.fields_by_name['value']._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
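+    # (SerializePartialToString(), below, skips this check.)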
+    if not self.IsInitialized():
+      raise message_mod.EncodeError(
+          'Message %s is missing required fields: %s' % (
+          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
+    return self.SerializePartialToString(**kwargs)
+  cls.SerializeToString = SerializeToString
+
+
+def _AddSerializePartialToStringMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+
+  def SerializePartialToString(self, **kwargs):
+    out = BytesIO()
+    self._InternalSerialize(out.write, **kwargs)
+    return out.getvalue()
+  cls.SerializePartialToString = SerializePartialToString
+
+  def InternalSerialize(self, write_bytes, deterministic=None):
+    if deterministic is None:
+      deterministic = (
+          api_implementation.IsPythonDefaultSerializationDeterministic())
+    else:
+      deterministic = bool(deterministic)
+
+    descriptor = self.DESCRIPTOR
+    if descriptor.GetOptions().map_entry:
+      # Fields of map entry should always be serialized.
+      descriptor.fields_by_name['key']._encoder(
+          write_bytes, self.key, deterministic)
+      descriptor.fields_by_name['value']._encoder(
+          write_bytes, self.value, deterministic)
+    else:
+      for field_descriptor, field_value in self.ListFields():
+        field_descriptor._encoder(write_bytes, field_value, deterministic)
+      for tag_bytes, value_bytes in self._unknown_fields:
+        write_bytes(tag_bytes)
+        write_bytes(value_bytes)
+  cls._InternalSerialize = InternalSerialize
+
+
+def _AddMergeFromStringMethod(message_descriptor, cls):
+  """Helper for _AddMessageMethods()."""
+  def MergeFromString(self, serialized):
+    serialized = memoryview(serialized)
+    length = len(serialized)
+    try:
+      if self._InternalParse(serialized, 0, length) != length:
+        # The only reason _InternalParse would return early is if it
+        # encountered an end-group tag.
+        raise message_mod.DecodeError('Unexpected end-group tag.')
+    except (IndexError, TypeError):
+      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
+      raise message_mod.DecodeError('Truncated message.')
+    except struct.error as e:
+      raise message_mod.DecodeError(e)
+    return length   # Return this for legacy reasons.
+  cls.MergeFromString = MergeFromString
+
+  local_ReadTag = decoder.ReadTag
+  local_SkipField = decoder.SkipField
+  decoders_by_tag = cls._decoders_by_tag
+
+  def InternalParse(self, buffer, pos, end):
+    """Parses serialized data into this message.
+
+    Args:
+      self: Message, instance of the proto message object.
+      buffer: memoryview of the serialized data.
+      pos: int, position to start in the serialized data.
+      end: int, end position of the serialized data.
+
+    Returns:
+      int, the new position in the serialized data. Any value other than
+      `end` means the message was not fully parsed (e.g. an end-group tag
+      was encountered).
+    """
+    # Guard against internal misuse, since this function is called internally
+    # quite extensively, and it's easy to accidentally pass bytes.
+ assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + if unknown_field_set is None: + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO(jieluo): remove old_pos. + old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + # TODO(jieluo): remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". 
+ """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + # pylint: disable=protected-access + if self._unknown_field_set is None: + self._unknown_field_set = containers.UnknownFieldSet() + self._unknown_field_set._extend(msg._unknown_field_set) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. 
+ self._fields = {} + self._unknown_fields = () + # pylint: disable=protected-access + if self._unknown_field_set is not None: + self._unknown_field_set._clear() + self._unknown_field_set = None + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + if self._unknown_field_set is None: # pylint: disable=protected-access + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + return self._unknown_field_set # pylint: disable=protected-access + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + self._unknown_field_set = None # pylint: disable=protected-access + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.UnknownFields = _UnknownFields + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.qux = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. + pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000..a53e71fe8e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py @@ -0,0 +1,435 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides type checking routines.
+
+This module defines type checking utilities in the forms of dictionaries:
+
+_VALUE_CHECKERS: A dictionary mapping field CPP types to value-checker
+  objects.
+TYPE_TO_BYTE_SIZE_FN: A dictionary mapping field types to byte-size
+  computing functions.
+TYPE_TO_ENCODER: A dictionary mapping field types to encoder constructors.
+TYPE_TO_SIZER: A dictionary mapping field types to sizer constructors.
+TYPE_TO_DECODER: A dictionary mapping field types to decoder constructors.
+FIELD_TYPE_TO_WIRE_TYPE: A dictionary mapping field types to their
+  corresponding wire types.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import ctypes
+import numbers
+
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor
+
+_FieldDescriptor = descriptor.FieldDescriptor
+
+
+def TruncateToFourByteFloat(original):
+  return ctypes.c_float(original).value
+
+
+def ToShortestFloat(original):
+  """Returns the shortest float that has the same value on the wire."""
+  # All 4 byte floats have between 6 and 9 significant digits, so we
+  # start with 6 as the lower bound.
+  # The loop is needed because formatting with '.9g' directly does not get
+  # rid of the noise for most values; for example, a float_field set to 0.9
+  # formats as 0.899999976 with '.9g'.
+  precision = 6
+  rounded = float('{0:.{1}g}'.format(original, precision))
+  while TruncateToFourByteFloat(rounded) != original:
+    precision += 1
+    rounded = float('{0:.{1}g}'.format(original, precision))
+  return rounded
+
+
+def SupportsOpenEnums(field_descriptor):
+  return field_descriptor.containing_type.syntax == 'proto3'
+
+
+def GetTypeChecker(field):
+  """Returns a type checker for a message field of the specified type.
+
+  Args:
+    field: FieldDescriptor object for this field.
+
+  Returns:
+    An instance of TypeChecker which can be used to verify the types
+    of values assigned to a field of the specified type.
+  """
+  if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
+      field.type == _FieldDescriptor.TYPE_STRING):
+    return UnicodeValueChecker()
+  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+    if SupportsOpenEnums(field):
+      # When open enums are supported, any int32 can be assigned.
+      return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
+    else:
+      return EnumValueChecker(field.enum_type)
+  return _VALUE_CHECKERS[field.cpp_type]
+
+
+# None of the typecheckers below make any attempt to guard against people
+# subclassing builtin types and doing weird things.
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +class TypeCheckerWithDefault(TypeChecker): + + def __init__(self, default_value, *acceptable_types): + TypeChecker.__init__(self, *acceptable_types) + self._default_value = default_value + + def DefaultValue(self): + return self._default_value + + +class BoolValueChecker(object): + """Type checker used for bool fields.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bool, int))) + raise TypeError(message) + return bool(proposed_value) + + def DefaultValue(self): + return False + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + + if not self._MIN <= int(proposed_value) <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force all values to int to make alternate implementations where the + # distinction is more significant (e.g. the C++ implementation) simpler. + proposed_value = int(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, numbers.Integral): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + if int(proposed_value) not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, str)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, str))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
+ if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + else: + try: + proposed_value.encode('utf8') + except UnicodeEncodeError: + raise ValueError('%.1024r isn\'t a valid unicode string and ' + 'can\'t be encoded in UTF-8.'% + (proposed_value)) + + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + + +# The max 4 bytes float is about 3.4028234663852886e+38 +_FLOAT_MAX = float.fromhex('0x1.fffffep+127') +_FLOAT_MIN = -_FLOAT_MAX +_INF = float('inf') +_NEG_INF = float('-inf') + + +class DoubleValueChecker(object): + """Checker used for double fields. + + Performs type-check and range check. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + if (not hasattr(proposed_value, '__float__') and + not hasattr(proposed_value, '__index__')) or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: int, float' % + (proposed_value, type(proposed_value))) + raise TypeError(message) + return float(proposed_value) + + def DefaultValue(self): + return 0.0 + + +class FloatValueChecker(DoubleValueChecker): + """Checker used for float fields. + + Performs type-check and range check. + + Values exceeding a 32-bit float will be converted to inf/-inf. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + converted_value = super().CheckValue(proposed_value) + # This inf rounding matches the C++ proto SafeDoubleToFloat logic. + if converted_value > _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
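+# For example, _BytesForNonRepeatedElement() in python_message.py looks up
+# fn = TYPE_TO_BYTE_SIZE_FN[field_type] and calls fn(field_number, value).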
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. +TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. +TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
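+# Each value is invoked as ctor(field_number, is_repeated, is_packed,
+# descriptor, default_constructor); scalar types additionally take a trailing
+# clear_if_default flag (see the decoder setup in python_message.py).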
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. +FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py new file mode 100644 index 0000000000..b581ab750a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py @@ -0,0 +1,878 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains well known classes.
+
+This file defines well known classes which need extra maintenance including:
+  - Any
+  - Duration
+  - FieldMask
+  - Struct
+  - Timestamp
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+import calendar
+import collections.abc
+import datetime
+
+from google.protobuf.descriptor import FieldDescriptor
+
+_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
+_NANOS_PER_SECOND = 1000000000
+_NANOS_PER_MILLISECOND = 1000000
+_NANOS_PER_MICROSECOND = 1000
+_MILLIS_PER_SECOND = 1000
+_MICROS_PER_SECOND = 1000000
+_SECONDS_PER_DAY = 24 * 3600
+_DURATION_SECONDS_MAX = 315576000000
+
+
+class Any(object):
+  """Class for Any Message type."""
+
+  __slots__ = ()
+
+  def Pack(self, msg, type_url_prefix='type.googleapis.com/',
+           deterministic=None):
+    """Packs the specified message into the current Any message."""
+    if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
+      self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+    else:
+      self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+    self.value = msg.SerializeToString(deterministic=deterministic)
+
+  def Unpack(self, msg):
+    """Unpacks the current Any message into the specified message."""
+    descriptor = msg.DESCRIPTOR
+    if not self.Is(descriptor):
+      return False
+    msg.ParseFromString(self.value)
+    return True
+
+  def TypeName(self):
+    """Returns the protobuf type name of the inner message."""
+    # Only last part is to be used: b/25630112
+    return self.type_url.split('/')[-1]
+
+  def Is(self, descriptor):
+    """Checks if this Any represents the given protobuf type."""
+    return '/' in self.type_url and self.TypeName() == descriptor.full_name
+
+
+_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0)
+_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp(
+    0, tz=datetime.timezone.utc)
+
+
+class Timestamp(object):
+  """Class for Timestamp message type."""
+
+  __slots__ = ()
+
+  def ToJsonString(self):
+    """Converts Timestamp to RFC 3339 date string format.
+
+    Returns:
+      A string converted from timestamp. The string is always Z-normalized
+      and uses 3, 6 or 9 fractional digits as required to represent the
+      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
+    """
+    nanos = self.nanos % _NANOS_PER_SECOND
+    total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
+    seconds = total_sec % _SECONDS_PER_DAY
+    days = (total_sec - seconds) // _SECONDS_PER_DAY
+    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)
+
+    result = dt.isoformat()
+    if (nanos % 1e9) == 0:
+      # If there are 0 fractional digits, the fractional
+      # point '.' should be omitted when serializing.
+      return result + 'Z'
+    if (nanos % 1e6) == 0:
+      # Serialize 3 fractional digits.
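+      # e.g. nanos=21000000 serializes as '.021Z', per the docstring example.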
+ return result + '.%03dZ' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06dZ' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09dZ' % nanos + + def FromJsonString(self, value): + """Parse a RFC 3339 date string format to Timestamp. + + Args: + value: A date string. Any fractional digits (or none) and any offset are + accepted as long as they fit into nano-seconds precision. + Example of accepted format: '1972-01-01T10:00:20.021-05:00' + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) + timezone_offset = value.find('Z') + if timezone_offset == -1: + timezone_offset = value.find('+') + if timezone_offset == -1: + timezone_offset = value.rfind('-') + if timezone_offset == -1: + raise ValueError( + 'Failed to parse timestamp: missing valid timezone offset.') + time_value = value[0:timezone_offset] + # Parse datetime and nanos. + point_position = time_value.find('.') + if point_position == -1: + second_value = time_value + nano_value = '' + else: + second_value = time_value[:point_position] + nano_value = time_value[point_position + 1:] + if 't' in second_value: + raise ValueError( + 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' + 'lowercase \'t\' is not accepted'.format(second_value)) + date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) + td = date_object - datetime.datetime(1970, 1, 1) + seconds = td.seconds + td.days * _SECONDS_PER_DAY + if len(nano_value) > 9: + raise ValueError( + 'Failed to parse Timestamp: nanos {0} more than ' + '9 fractional digits.'.format(nano_value)) + if nano_value: + nanos = round(float('0.' + nano_value) * 1e9) + else: + nanos = 0 + # Parse timezone offsets. 
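+    # A '+HH:MM' offset is subtracted and a '-HH:MM' offset is added, so
+    # self.seconds is always stored as UTC.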
+ if value[timezone_offset] == 'Z': + if len(value) != timezone_offset + 1: + raise ValueError('Failed to parse timestamp: invalid trailing' + ' data {0}.'.format(value)) + else: + timezone = value[timezone_offset:] + pos = timezone.find(':') + if pos == -1: + raise ValueError( + 'Invalid timezone offset value: {0}.'.format(timezone)) + if timezone[0] == '+': + seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + else: + seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + # Set seconds and nanos + self.seconds = int(seconds) + self.nanos = int(nanos) + + def GetCurrentTime(self): + """Get the current UTC into Timestamp.""" + self.FromDatetime(datetime.datetime.utcnow()) + + def ToNanoseconds(self): + """Converts Timestamp to nanoseconds since epoch.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts Timestamp to microseconds since epoch.""" + return (self.seconds * _MICROS_PER_SECOND + + self.nanos // _NANOS_PER_MICROSECOND) + + def ToMilliseconds(self): + """Converts Timestamp to milliseconds since epoch.""" + return (self.seconds * _MILLIS_PER_SECOND + + self.nanos // _NANOS_PER_MILLISECOND) + + def ToSeconds(self): + """Converts Timestamp to seconds since epoch.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds since epoch to Timestamp.""" + self.seconds = nanos // _NANOS_PER_SECOND + self.nanos = nanos % _NANOS_PER_SECOND + + def FromMicroseconds(self, micros): + """Converts microseconds since epoch to Timestamp.""" + self.seconds = micros // _MICROS_PER_SECOND + self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND + + def FromMilliseconds(self, millis): + """Converts milliseconds since epoch to Timestamp.""" + self.seconds = millis // _MILLIS_PER_SECOND + self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND + + def FromSeconds(self, seconds): + """Converts seconds since epoch to Timestamp.""" + self.seconds = seconds + self.nanos = 0 + + def ToDatetime(self, tzinfo=None): + """Converts Timestamp to a datetime. + + Args: + tzinfo: A datetime.tzinfo subclass; defaults to None. + + Returns: + If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone + information, i.e. not aware that it's UTC). + + Otherwise, returns a timezone-aware datetime in the input timezone. + """ + delta = datetime.timedelta( + seconds=self.seconds, + microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) + if tzinfo is None: + return _EPOCH_DATETIME_NAIVE + delta + else: + return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta + + def FromDatetime(self, dt): + """Converts datetime to Timestamp. + + Args: + dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. + """ + # Using this guide: http://wiki.python.org/moin/WorkingWithTime + # And this conversion guide: http://docs.python.org/library/time.html + + # Turn the date parameter into a tuple (struct_time) that can then be + # manipulated into a long value of seconds. During the conversion from + # struct_time to long, the source date in UTC, and so it follows that the + # correct transformation is calendar.timegm() + self.seconds = calendar.timegm(dt.utctimetuple()) + self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND + + +class Duration(object): + """Class for Duration message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Duration to string format. + + Returns: + A string converted from self. 
The string format will contains + 3, 6, or 9 fractional digits depending on the precision required to + represent the exact Duration value. For example: "1s", "1.010s", + "1.000000100s", "-3.100s" + """ + _CheckDurationValid(self.seconds, self.nanos) + if self.seconds < 0 or self.nanos < 0: + result = '-' + seconds = - self.seconds + int((0 - self.nanos) // 1e9) + nanos = (0 - self.nanos) % 1e9 + else: + result = '' + seconds = self.seconds + int(self.nanos // 1e9) + nanos = self.nanos % 1e9 + result += '%d' % seconds + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 's' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03ds' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06ds' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09ds' % nanos + + def FromJsonString(self, value): + """Converts a string to Duration. + + Args: + value: A string to be converted. The string must end with 's'. Any + fractional digits (or none) are accepted as long as they fit into + precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Duration JSON value not a string: {!r}'.format(value)) + if len(value) < 1 or value[-1] != 's': + raise ValueError( + 'Duration must end with letter "s": {0}.'.format(value)) + try: + pos = value.find('.') + if pos == -1: + seconds = int(value[:-1]) + nanos = 0 + else: + seconds = int(value[:pos]) + if value[0] == '-': + nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) + else: + nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) + _CheckDurationValid(seconds, nanos) + self.seconds = seconds + self.nanos = nanos + except ValueError as e: + raise ValueError( + 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) + + def ToNanoseconds(self): + """Converts a Duration to nanoseconds.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts a Duration to microseconds.""" + micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) + return self.seconds * _MICROS_PER_SECOND + micros + + def ToMilliseconds(self): + """Converts a Duration to milliseconds.""" + millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) + return self.seconds * _MILLIS_PER_SECOND + millis + + def ToSeconds(self): + """Converts a Duration to seconds.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds to Duration.""" + self._NormalizeDuration(nanos // _NANOS_PER_SECOND, + nanos % _NANOS_PER_SECOND) + + def FromMicroseconds(self, micros): + """Converts microseconds to Duration.""" + self._NormalizeDuration( + micros // _MICROS_PER_SECOND, + (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) + + def FromMilliseconds(self, millis): + """Converts milliseconds to Duration.""" + self._NormalizeDuration( + millis // _MILLIS_PER_SECOND, + (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) + + def FromSeconds(self, seconds): + """Converts seconds to Duration.""" + self.seconds = seconds + self.nanos = 0 + + def ToTimedelta(self): + """Converts Duration to timedelta.""" + return datetime.timedelta( + seconds=self.seconds, microseconds=_RoundTowardZero( + self.nanos, _NANOS_PER_MICROSECOND)) + + def FromTimedelta(self, td): + """Converts timedelta to Duration.""" + 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nanos.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _CheckDurationValid(seconds, nanos): + if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: + raise ValueError( + 'Duration is not valid: Seconds {0} must be in range ' + '[-315576000000, 315576000000].'.format(seconds)) + if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: + raise ValueError( + 'Duration is not valid: Nanos {0} must be in range ' + '[-999999999, 999999999].'.format(nanos)) + if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): + raise ValueError( + 'Duration is not valid: Sign mismatch.') + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languages, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + camelcase_paths = [] + for path in self.paths: + camelcase_paths.append(_SnakeCaseToCamelCase(path)) + return ','.join(camelcase_paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + if not isinstance(value, str): + raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) + self.Clear() + if value: + for path in value.split(','): + self.paths.append(_CamelCaseToSnakeCase(path)) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. 
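+
+    Example (illustrative): a mask with paths ['foo.bar', 'foo'] reduces to
+    just ['foo'], since 'foo' already covers 'foo.bar'.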
+ """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. + """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain "_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. 
+ """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +def _SetStructValue(struct_value, value): + if value is None: + struct_value.null_value = 0 + elif isinstance(value, bool): + # Note: this check must come before the number check because in Python + # True and False are also considered numbers. + struct_value.bool_value = value + elif isinstance(value, str): + struct_value.string_value = value + elif isinstance(value, (int, float)): + struct_value.number_value = value + elif isinstance(value, (dict, Struct)): + struct_value.struct_value.Clear() + struct_value.struct_value.update(value) + elif isinstance(value, (list, ListValue)): + struct_value.list_value.Clear() + struct_value.list_value.extend(value) + else: + raise ValueError('Unexpected type') + + +def _GetStructValue(struct_value): + which = struct_value.WhichOneof('kind') + if which == 'struct_value': + return struct_value.struct_value + elif which == 'null_value': + return None + elif which == 'number_value': + return struct_value.number_value + elif which == 'string_value': + return struct_value.string_value + elif which == 'bool_value': + return struct_value.bool_value + elif which == 'list_value': + return struct_value.list_value + elif which is None: + raise ValueError('Value not set') + + +class Struct(object): + """Class for Struct message type.""" + + __slots__ = () + + def __getitem__(self, key): + return _GetStructValue(self.fields[key]) + + def __contains__(self, item): + return item in self.fields + + def __setitem__(self, key, value): + _SetStructValue(self.fields[key], value) + + def __delitem__(self, key): + del self.fields[key] + + def __len__(self): + return len(self.fields) + + def __iter__(self): + return iter(self.fields) + + def keys(self): # pylint: disable=invalid-name + return self.fields.keys() + + def values(self): # pylint: disable=invalid-name + return [self[key] for key in self] + + def items(self): # pylint: disable=invalid-name + return [(key, self[key]) for key in self] + + def get_or_create_list(self, key): + """Returns a list for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('list_value'): + # Clear will mark list_value modified which will indeed create a list. 
+ self.fields[key].list_value.Clear() + return self.fields[key].list_value + + def get_or_create_struct(self, key): + """Returns a struct for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('struct_value'): + # Clear will mark struct_value modified which will indeed create a struct. + self.fields[key].struct_value.Clear() + return self.fields[key].struct_value + + def update(self, dictionary): # pylint: disable=invalid-name + for key, value in dictionary.items(): + _SetStructValue(self.fields[key], value) + +collections.abc.MutableMapping.register(Struct) + + +class ListValue(object): + """Class for ListValue message type.""" + + __slots__ = () + + def __len__(self): + return len(self.values) + + def append(self, value): + _SetStructValue(self.values.add(), value) + + def extend(self, elem_seq): + for value in elem_seq: + self.append(value) + + def __getitem__(self, index): + """Retrieves item by the specified index.""" + return _GetStructValue(self.values.__getitem__(index)) + + def __setitem__(self, index, value): + _SetStructValue(self.values.__getitem__(index), value) + + def __delitem__(self, key): + del self.values[key] + + def items(self): + for i in range(len(self)): + yield self[i] + + def add_struct(self): + """Appends and returns a struct value as the next value in the list.""" + struct_value = self.values.add().struct_value + # Clear will mark struct_value modified which will indeed create a struct. + struct_value.Clear() + return struct_value + + def add_list(self): + """Appends and returns a list value as the next value in the list.""" + list_value = self.values.add().list_value + # Clear will mark list_value modified which will indeed create a list. + list_value.Clear() + return list_value + +collections.abc.MutableSequence.register(ListValue) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.ListValue': ListValue, + 'google.protobuf.Struct': Struct, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py new file mode 100644 index 0000000000..883f525585 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from google.protobuf import descriptor
+from google.protobuf import message
+
+
+TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+  raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+  raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+  """Returns an unsigned 32-bit integer that encodes the field number and
+  wire type information in standard protocol message wire format.
+
+  Args:
+    field_number: Expected to be an integer in the range [1, 1 << 29)
+    wire_type: One of the WIRETYPE_* constants.
+  """
+  if not 0 <= wire_type <= _WIRETYPE_MAX:
+    raise message.EncodeError('Unknown wire type: %d' % wire_type)
+  return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+  """The inverse of PackTag().  Given an unsigned 32-bit number,
+  returns a (field_number, wire_type) tuple.
+  """
+  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
+
+
+def ZigZagEncode(value):
+  """ZigZag Transform: Encodes signed integers so that they can be
+  effectively used with varint encoding. See wire_format.h for
+  more details.
+  """
+  if value >= 0:
+    return value << 1
+  return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+  """Inverse of ZigZagEncode()."""
+  if not value & 0x1:
+    return value >> 1
+  return (value >> 1) ^ (~0)
+
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+  return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+  # Have to convert to uint before calling UInt64ByteSize().
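+  # Masking with 2**64 - 1 maps a negative int64 to its unsigned two's
+  # complement value, e.g. -1 -> 0xffffffffffffffff, which costs the full
+  # ten varint bytes.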
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) + + +def UInt32ByteSize(field_number, uint32): + return UInt64ByteSize(field_number, uint32) + + +def UInt64ByteSize(field_number, uint64): + return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) + + +def SInt32ByteSize(field_number, int32): + return UInt32ByteSize(field_number, ZigZagEncode(int32)) + + +def SInt64ByteSize(field_number, int64): + return UInt64ByteSize(field_number, ZigZagEncode(int64)) + + +def Fixed32ByteSize(field_number, fixed32): + return TagByteSize(field_number) + 4 + + +def Fixed64ByteSize(field_number, fixed64): + return TagByteSize(field_number) + 8 + + +def SFixed32ByteSize(field_number, sfixed32): + return TagByteSize(field_number) + 4 + + +def SFixed64ByteSize(field_number, sfixed64): + return TagByteSize(field_number) + 8 + + +def FloatByteSize(field_number, flt): + return TagByteSize(field_number) + 4 + + +def DoubleByteSize(field_number, double): + return TagByteSize(field_number) + 8 + + +def BoolByteSize(field_number, b): + return TagByteSize(field_number) + 1 + + +def EnumByteSize(field_number, enum): + return UInt32ByteSize(field_number, enum) + + +def StringByteSize(field_number, string): + return BytesByteSize(field_number, string.encode('utf-8')) + + +def BytesByteSize(field_number, b): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(len(b)) + + len(b)) + + +def GroupByteSize(field_number, message): + return (2 * TagByteSize(field_number) # START and END group. + + message.ByteSize()) + + +def MessageByteSize(field_number, message): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(message.ByteSize()) + + message.ByteSize()) + + +def MessageSetItemByteSize(field_number, msg): + # First compute the sizes of the tags. + # There are 2 tags for the beginning and ending of the repeated group, that + # is field number 1, one with field number 2 (type_id) and one with field + # number 3 (message). + total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) + + # Add the number of bytes for type_id. + total_size += _VarUInt64ByteSizeNoTag(field_number) + + message_size = msg.ByteSize() + + # The number of bytes for encoding the length of the message. + total_size += _VarUInt64ByteSizeNoTag(message_size) + + # The size of the message. + total_size += message_size + return total_size + + +def TagByteSize(field_number): + """Returns the bytes required to serialize a tag with this field number.""" + # Just pass in type 0, since the type won't affect the tag+type size. + return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) + + +# Private helper function for the *ByteSize() functions above. + +def _VarUInt64ByteSizeNoTag(uint64): + """Returns the number of bytes required to serialize a single varint + using boundary value comparisons. (unrolled loop optimization -WPierce) + uint64 must be unsigned. 
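+
+  For example: 0x7f fits in one byte, 0x80 needs two, and 2**64 - 1 needs
+  the full ten.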
+ """ + if uint64 <= 0x7f: return 1 + if uint64 <= 0x3fff: return 2 + if uint64 <= 0x1fffff: return 3 + if uint64 <= 0xfffffff: return 4 + if uint64 <= 0x7ffffffff: return 5 + if uint64 <= 0x3ffffffffff: return 6 + if uint64 <= 0x1ffffffffffff: return 7 + if uint64 <= 0xffffffffffffff: return 8 + if uint64 <= 0x7fffffffffffffff: return 9 + if uint64 > UINT64_MAX: + raise message.EncodeError('Value out of range: %d' % uint64) + return 10 + + +NON_PACKABLE_TYPES = ( + descriptor.FieldDescriptor.TYPE_STRING, + descriptor.FieldDescriptor.TYPE_GROUP, + descriptor.FieldDescriptor.TYPE_MESSAGE, + descriptor.FieldDescriptor.TYPE_BYTES +) + + +def IsTypePackable(field_type): + """Return true iff packable = true is valid for fields of this type. + + Args: + field_type: a FieldDescriptor::Type value. + + Returns: + True iff fields of this type are packable. + """ + return field_type not in NON_PACKABLE_TYPES diff --git a/openpype/hosts/hiero/vendor/google/protobuf/json_format.py b/openpype/hosts/hiero/vendor/google/protobuf/json_format.py new file mode 100644 index 0000000000..5024ed89d7 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/json_format.py @@ -0,0 +1,912 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in JSON format. + +Simple usage example: + + # Create a proto object and serialize it to a json format string. + message = my_proto_pb2.MyMessage(foo='bar') + json_string = json_format.MessageToJson(message) + + # Parse a json format string to proto object. 
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + + +import base64 +from collections import OrderedDict +import json +import math +from operator import methodcaller +import re +import sys + +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import symbol_database + + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, + descriptor.FieldDescriptor.CPPTYPE_UINT32, + descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, + descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) +_INFINITY = 'Infinity' +_NEG_INFINITY = '-Infinity' +_NAN = 'NaN' + +_UNPAIRED_SURROGATE_PATTERN = re.compile( + u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: + raise ParseError('Message too deep. Max recursion depth is {0}'.format( + self.max_recursion_depth)) + message_descriptor = message.DESCRIPTOR + full_name = message_descriptor.full_name + if not path: + path = message_descriptor.name + if _IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value, message, path) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) + else: + self._ConvertFieldValuePair(value, message, path) + self.recursion_depth -= 1 + + def _ConvertFieldValuePair(self, js, message, path): + """Convert field value pairs into regular message. + + Args: + js: A JSON object to convert the field value pairs. + message: A regular protocol message to record the data. + path: parent path to log parse error info. + + Raises: + ParseError: In case of problems converting. + """ + names = [] + message_descriptor = message.DESCRIPTOR + fields_by_json_name = dict((f.json_name, f) + for f in message_descriptor.fields) + for name in js: + try: + field = fields_by_json_name.get(name, None) + if not field: + field = message_descriptor.fields_by_name.get(name, None) + if not field and _VALID_EXTENSION_NAME.match(name): + if not message_descriptor.is_extendable: + raise ParseError( + 'Message type {0} does not have extensions at {1}'.format( + message_descriptor.full_name, path)) + identifier = name[1:-1] # strip [] brackets + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + # Try looking for extension by the message type name, dropping the + # field name following the final . separator in full_name. + identifier = '.'.join(identifier.split('.')[:-1]) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + if self.ignore_unknown_fields: + continue + raise ParseError( + ('Message type "{0}" has no field named "{1}" at "{2}".\n' + ' Available Fields(except extensions): "{3}"').format( + message_descriptor.full_name, name, path, + [f.json_name for f in message_descriptor.fields])) + if name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" fields at "{2}".'.format( + message.DESCRIPTOR.full_name, name, path)) + names.append(name) + value = js[name] + # Check no other oneof field is parsed. 
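+        # e.g. JSON that populates two members of the same oneof group
+        # (illustrative: {"a": 1, "b": 2} where a and b share a oneof) is
+        # rejected with a ParseError below.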
+ if field.containing_oneof is not None and value is not None: + oneof_name = field.containing_oneof.name + if oneof_name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" oneof fields at "{2}".'.format( + message.DESCRIPTOR.full_name, oneof_name, + path)) + names.append(oneof_name) + + if value is None: + if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.message_type.full_name == 'google.protobuf.Value'): + sub_message = getattr(message, field.name) + sub_message.null_value = 0 + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM + and field.enum_type.full_name == 'google.protobuf.NullValue'): + setattr(message, field.name, 0) + else: + message.ClearField(field.name) + continue + + # Parse field value. + if _IsMapEntry(field): + message.ClearField(field.name) + self._ConvertMapFieldValue(value, message, field, + '{0}.{1}'.format(path, name)) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + message.ClearField(field.name) + if not isinstance(value, list): + raise ParseError('repeated field {0} must be in [] which is ' + '{1} at {2}'.format(name, value, path)) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # Repeated message field. + for index, item in enumerate(value): + sub_message = getattr(message, field.name).add() + # None is a null_value in Value. + if (item is None and + sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + self.ConvertMessage(item, sub_message, + '{0}.{1}[{2}]'.format(path, name, index)) + else: + # Repeated scalar field. + for index, item in enumerate(value): + if item is None: + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + getattr(message, field.name).append( + _ConvertScalarFieldValue( + item, field, '{0}.{1}[{2}]'.format(path, name, index))) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) + else: + if field.is_extension: + message.Extensions[field] = _ConvertScalarFieldValue( + value, field, '{0}.{1}'.format(path, name)) + else: + setattr( + message, field.name, + _ConvertScalarFieldValue(value, field, + '{0}.{1}'.format(path, name))) + except ParseError as e: + if field and field.containing_oneof is None: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + else: + raise ParseError(str(e)) + except ValueError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + except TypeError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + + def _ConvertAnyMessage(self, value, message, path): + """Convert a JSON representation into Any message.""" + if isinstance(value, dict) and not value: + return + try: + type_url = value['@type'] + except KeyError: + raise ParseError( + '@type is missing when parsing any message at {0}'.format(path)) + + try: + sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) + except TypeError as e: + raise ParseError('{0} at {1}'.format(e, path)) + message_descriptor = sub_message.DESCRIPTOR + full_name = message_descriptor.full_name + if 
_IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value['value'], sub_message, + '{0}.value'.format(path)) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, + '{0}.value'.format(path))( + self) + else: + del value['@type'] + self._ConvertFieldValuePair(value, sub_message, path) + value['@type'] = type_url + # Sets Any message + message.value = sub_message.SerializeToString() + message.type_url = type_url + + def _ConvertGenericMessage(self, value, message, path): + """Convert a JSON representation into message with FromJsonString.""" + # Duration, Timestamp, FieldMask have a FromJsonString method to do the + # conversion. Users can also call the method directly. + try: + message.FromJsonString(value) + except ValueError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + def _ConvertValueMessage(self, value, message, path): + """Convert a JSON representation into Value message.""" + if isinstance(value, dict): + self._ConvertStructMessage(value, message.struct_value, path) + elif isinstance(value, list): + self._ConvertListValueMessage(value, message.list_value, path) + elif value is None: + message.null_value = 0 + elif isinstance(value, bool): + message.bool_value = value + elif isinstance(value, str): + message.string_value = value + elif isinstance(value, _INT_OR_FLOAT): + message.number_value = value + else: + raise ParseError('Value {0} has unexpected type {1} at {2}'.format( + value, type(value), path)) + + def _ConvertListValueMessage(self, value, message, path): + """Convert a JSON representation into ListValue message.""" + if not isinstance(value, list): + raise ParseError('ListValue must be in [] which is {0} at {1}'.format( + value, path)) + message.ClearField('values') + for index, item in enumerate(value): + self._ConvertValueMessage(item, message.values.add(), + '{0}[{1}]'.format(path, index)) + + def _ConvertStructMessage(self, value, message, path): + """Convert a JSON representation into Struct message.""" + if not isinstance(value, dict): + raise ParseError('Struct must be in a dict which is {0} at {1}'.format( + value, path)) + # Clear will mark the struct as modified so it will be created even if + # there are no values. + message.Clear() + for key in value: + self._ConvertValueMessage(value[key], message.fields[key], + '{0}.{1}'.format(path, key)) + return + + def _ConvertWrapperMessage(self, value, message, path): + """Convert a JSON representation into Wrapper message.""" + field = message.DESCRIPTOR.fields_by_name['value'] + setattr( + message, 'value', + _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) + + def _ConvertMapFieldValue(self, value, message, field, path): + """Convert map field value for a message map field. + + Args: + value: A JSON object to convert the map field value. + message: A protocol message to record the converted data. + field: The descriptor of the map field to be converted. + path: parent path to log parse error info. + + Raises: + ParseError: In case of convert problems. 
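+
+    Example (illustrative): {"a": 1, "b": 2} fills a map<string, int32>
+    field with two entries.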
+ """ + if not isinstance(value, dict): + raise ParseError( + 'Map field {0} must be in a dict which is {1} at {2}'.format( + field.name, value, path)) + key_field = field.message_type.fields_by_name['key'] + value_field = field.message_type.fields_by_name['value'] + for key in value: + key_value = _ConvertScalarFieldValue(key, key_field, + '{0}.key'.format(path), True) + if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self.ConvertMessage(value[key], + getattr(message, field.name)[key_value], + '{0}[{1}]'.format(path, key_value)) + else: + getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( + value[key], value_field, path='{0}[{1}]'.format(path, key_value)) + + +def _ConvertScalarFieldValue(value, field, path, require_str=False): + """Convert a single scalar field value. + + Args: + value: A scalar value to convert the scalar field value. + field: The descriptor of the field to convert. + path: parent path to log parse error info. + require_str: If True, the field value must be a str. + + Returns: + The converted scalar field value + + Raises: + ParseError: In case of convert problems. + """ + try: + if field.cpp_type in _INT_TYPES: + return _ConvertInteger(value) + elif field.cpp_type in _FLOAT_TYPES: + return _ConvertFloat(value, field) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + return _ConvertBool(value, require_str) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + if isinstance(value, str): + encoded = value.encode('utf-8') + else: + encoded = value + # Add extra padding '=' + padded_value = encoded + b'=' * (4 - len(encoded) % 4) + return base64.urlsafe_b64decode(padded_value) + else: + # Checking for unpaired surrogates appears to be unreliable, + # depending on the specific Python version, so we check manually. + if _UNPAIRED_SURROGATE_PATTERN.search(value): + raise ParseError('Unpaired surrogate') + return value + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + # Convert an enum value. + enum_value = field.enum_type.values_by_name.get(value, None) + if enum_value is None: + try: + number = int(value) + enum_value = field.enum_type.values_by_number.get(number, None) + except ValueError: + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + if enum_value is None: + if field.file.syntax == 'proto3': + # Proto3 accepts unknown enums. + return number + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + return enum_value.number + except ParseError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + +def _ConvertInteger(value): + """Convert an integer. + + Args: + value: A scalar value to convert. + + Returns: + The integer value. + + Raises: + ParseError: If an integer couldn't be consumed. 
+ """ + if isinstance(value, float) and not value.is_integer(): + raise ParseError('Couldn\'t parse integer: {0}'.format(value)) + + if isinstance(value, str) and value.find(' ') != -1: + raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) + + if isinstance(value, bool): + raise ParseError('Bool value {0} is not acceptable for ' + 'integer field'.format(value)) + + return int(value) + + +def _ConvertFloat(value, field): + """Convert an floating point number.""" + if isinstance(value, float): + if math.isnan(value): + raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') + if math.isinf(value): + if value > 0: + raise ParseError('Couldn\'t parse Infinity or value too large, ' + 'use quoted "Infinity" instead') + else: + raise ParseError('Couldn\'t parse -Infinity or value too small, ' + 'use quoted "-Infinity" instead') + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + # pylint: disable=protected-access + if value > type_checkers._FLOAT_MAX: + raise ParseError('Float value too large') + # pylint: disable=protected-access + if value < type_checkers._FLOAT_MIN: + raise ParseError('Float value too small') + if value == 'nan': + raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') + try: + # Assume Python compatible syntax. + return float(value) + except ValueError: + # Check alternative spellings. + if value == _NEG_INFINITY: + return float('-inf') + elif value == _INFINITY: + return float('inf') + elif value == _NAN: + return float('nan') + else: + raise ParseError('Couldn\'t parse float: {0}'.format(value)) + + +def _ConvertBool(value, require_str): + """Convert a boolean value. + + Args: + value: A scalar value to convert. + require_str: If True, value must be a str. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + if require_str: + if value == 'true': + return True + elif value == 'false': + return False + else: + raise ParseError('Expected "true" or "false", not {0}'.format(value)) + + if not isinstance(value, bool): + raise ParseError('Expected true or false without quotes') + return value + +_WKTJSONMETHODS = { + 'google.protobuf.Any': ['_AnyMessageToJsonObject', + '_ConvertAnyMessage'], + 'google.protobuf.Duration': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', + '_ConvertListValueMessage'], + 'google.protobuf.Struct': ['_StructMessageToJsonObject', + '_ConvertStructMessage'], + 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.Value': ['_ValueMessageToJsonObject', + '_ConvertValueMessage'] +} diff --git a/openpype/hosts/hiero/vendor/google/protobuf/message.py b/openpype/hosts/hiero/vendor/google/protobuf/message.py new file mode 100644 index 0000000000..76c6802f70 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/message.py @@ -0,0 +1,424 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): + """Base error type for this module.""" + pass + + +class DecodeError(Error): + """Exception raised when deserializing messages.""" + pass + + +class EncodeError(Error): + """Exception raised when serializing messages.""" + pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + """ + + # TODO(robinson): Link to an HTML document here. + + # TODO(robinson): Document that instances of this class will also + # have an Extensions attribute with __getitem__ and __setitem__. + # Again, not sure how to best convey this. + + # TODO(robinson): Document that the class must also have a static + # RegisterExtension(extension_field) method. + # Not sure how to best express at this point. + + # TODO(robinson): Document these fields and methods. + + __slots__ = [] + + #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. 
Singular sub-messages and groups are recursively merged. + + Args: + other_msg (Message): A message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg (Message): A message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. If you find yourself using this, + you may want to reconsider your design. + """ + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + bool: The method returns True if the message is initialized (i.e. all of + its required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in `serialized` that is already present + in this message: + + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + Args: + serialized (bytes): Any object that allows us to call + ``memoryview(serialized)`` to access a string of bytes using the + buffer interface. + + Returns: + int: The number of bytes read from `serialized`. + For non-group messages, this will always be `len(serialized)`, + but for messages which are actually groups, this will + generally be less than `len(serialized)`, since we must + stop when we reach an ``END_GROUP`` tag. Note that if + we *do* stop because of an ``END_GROUP`` tag, the number + of bytes returned does not include the bytes + for the ``END_GROUP`` tag information. + + Raises: + DecodeError: if the input cannot be parsed. + """ + # TODO(robinson): Document handling of unknown fields. + # TODO(robinson): When we switch to a helper, this will return None. + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like :func:`MergeFromString()`, except we clear the object first. + + Raises: + message.DecodeError if the input cannot be parsed. + """ + self.Clear() + return self.MergeFromString(serialized) + + def SerializeToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
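+
+    For example, ``msg.ParseFromString(msg.SerializeToString())`` should
+    round-trip a fully initialized message.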
+ """ + raise NotImplementedError + + def SerializePartialToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + bytes: A serialized representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for present fields. + + A message field is non-empty if HasField() would return true. A singular + primitive field is non-empty if HasField() would return true in proto2 or it + is non zero in proto3. A repeated field is non-empty if it contains at least + one element. The fields are ordered by field number. + + Returns: + list[tuple(FieldDescriptor, value)]: field descriptors and values + for all fields in the message which are not empty. The values vary by + field type. + """ + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message. + + For a oneof group, checks if any field inside is set. Note that if the + field_name is not defined in the message descriptor, :exc:`ValueError` will + be raised. + + Args: + field_name (str): The name of the field to check for presence. + + Returns: + bool: Whether a value has been set for the named field. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field. + + Inside a oneof group, clears the field set. If the name neither refers to a + defined field or oneof group, :exc:`ValueError` is raised. + + Args: + field_name (str): The name of the field to check for presence. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group. + + If no field is set, returns None. + + Args: + oneof_group (str): the name of the oneof group to check. + + Returns: + str or None: The name of the group that is set, or None. + + Raises: + ValueError: no group with the given name exists + """ + raise NotImplementedError + + def HasExtension(self, extension_handle): + """Checks if a certain extension is present for this message. + + Extensions are retrieved using the :attr:`Extensions` mapping (if present). + + Args: + extension_handle: The handle for the extension to check. + + Returns: + bool: Whether the extension is present for this message. + + Raises: + KeyError: if the extension is repeated. 
Similar to repeated fields, + there is no separate notion of presence: a "not present" repeated + extension is an empty list. + """ + raise NotImplementedError + + def ClearExtension(self, extension_handle): + """Clears the contents of a given extension. + + Args: + extension_handle: The handle for the extension to clear. + """ + raise NotImplementedError + + def UnknownFields(self): + """Returns the UnknownFieldSet. + + Returns: + UnknownFieldSet: The unknown fields stored in this message. + """ + raise NotImplementedError + + def DiscardUnknownFields(self): + """Clears all fields in the :class:`UnknownFieldSet`. + + This operation is recursive for nested message. + """ + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + + Recursively calls ByteSize() on all contained messages. + + Returns: + int: The number of bytes required to serialize this message. + """ + raise NotImplementedError + + @classmethod + def FromString(cls, s): + raise NotImplementedError + + @staticmethod + def RegisterExtension(extension_handle): + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. + """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + serialized = state['serialized'] + # On Python 3, using encoding='latin1' is required for unpickling + # protos pickled by Python 2. + if not isinstance(serialized, bytes): + serialized = serialized.encode('latin1') + self.ParseFromString(serialized) + + def __reduce__(self): + message_descriptor = self.DESCRIPTOR + if message_descriptor.containing_type is None: + return type(self), (), self.__getstate__() + # the message type must be nested. + # Python does not pickle nested classes; use the symbol_database on the + # receiving end. + container = message_descriptor + return (_InternalConstructMessage, (container.full_name,), + self.__getstate__()) + + +def _InternalConstructMessage(full_name): + """Constructs a nested message.""" + from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top + + return symbol_database.Default().GetSymbol(full_name)() diff --git a/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py b/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py new file mode 100644 index 0000000000..3656fa6874 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + + +# The type of all Message classes. +_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Obtains a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + if descriptor not in self._classes: + result_class = self.CreatePrototype(descriptor) + # The assignment to _classes is redundant for the base implementation, but + # might avoid confusion in cases where CreatePrototype gets overridden and + # does not call the base implementation. 
+      self._classes[descriptor] = result_class
+      return result_class
+    return self._classes[descriptor]
+
+  def CreatePrototype(self, descriptor):
+    """Builds a proto2 message class based on the passed in descriptor.
+
+    Don't call this function directly; it always creates a new class. Call
+    GetPrototype() instead. This method is meant to be overridden in subclasses
+    to perform additional operations on the newly constructed class.
+
+    Args:
+      descriptor: The descriptor to build from.
+
+    Returns:
+      A class describing the passed in descriptor.
+    """
+    descriptor_name = descriptor.name
+    result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
+        descriptor_name,
+        (message.Message,),
+        {
+            'DESCRIPTOR': descriptor,
+            # If module not set, it wrongly points to message_factory module.
+            '__module__': None,
+        })
+    result_class._FACTORY = self  # pylint: disable=protected-access
+    # Assign in _classes before doing recursive calls to avoid infinite
+    # recursion.
+    self._classes[descriptor] = result_class
+    for field in descriptor.fields:
+      if field.message_type:
+        self.GetPrototype(field.message_type)
+    for extension in result_class.DESCRIPTOR.extensions:
+      if extension.containing_type not in self._classes:
+        self.GetPrototype(extension.containing_type)
+      extended_class = self._classes[extension.containing_type]
+      extended_class.RegisterExtension(extension)
+    return result_class
+
+  def GetMessages(self, files):
+    """Gets all the messages from a specified file.
+
+    This will find and resolve dependencies, failing if the descriptor
+    pool cannot satisfy them.
+
+    Args:
+      files: The file names to extract messages from.
+
+    Returns:
+      A dictionary mapping proto names to the message classes. This will include
+      any dependent messages as well as any messages defined in the same file as
+      a specified message.
+    """
+    result = {}
+    for file_name in files:
+      file_desc = self.pool.FindFileByName(file_name)
+      for desc in file_desc.message_types_by_name.values():
+        result[desc.full_name] = self.GetPrototype(desc)
+
+      # While the extension FieldDescriptors are created by the descriptor pool,
+      # the python classes created in the factory need them to be registered
+      # explicitly, which is done below.
+      #
+      # The call to RegisterExtension will specifically check if the
+      # extension was already registered on the object and either
+      # ignore the registration if the original was the same, or raise
+      # an error if they were different.
+
+      for extension in file_desc.extensions_by_name.values():
+        if extension.containing_type not in self._classes:
+          self.GetPrototype(extension.containing_type)
+        extended_class = self._classes[extension.containing_type]
+        extended_class.RegisterExtension(extension)
+    return result
+
+
+_FACTORY = MessageFactory()
+
+
+def GetMessages(file_protos):
+  """Builds a dictionary of all the messages available in a set of files.
+
+  Args:
+    file_protos: Iterable of FileDescriptorProto to build messages out of.
+
+  Returns:
+    A dictionary mapping proto names to the message classes. This will include
+    any dependent messages as well as any messages defined in the same file as
+    a specified message.
+  """
+  # The cpp implementation of the protocol buffer library requires adding the
+  # messages in topological order of the dependency graph.
+  file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
+  def _AddFile(file_proto):
+    for dependency in file_proto.dependency:
+      if dependency in file_by_name:
+        # Remove from elements to be visited, in order to cut cycles.
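+        # Each dependency is popped from file_by_name before the recursive
+        # call, so every file is added to the pool at most once even when
+        # several files import it.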
+ _AddFile(file_by_name.pop(dependency)) + _FACTORY.pool.Add(file_proto) + while file_by_name: + _AddFile(file_by_name.popitem()[1]) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py b/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py new file mode 100644 index 0000000000..a4667ce63e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py @@ -0,0 +1,134 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +from collections import OrderedDict +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! + + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. 
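+
+  Example (an illustrative sketch; the field names used here are arbitrary):
+
+    from google.protobuf import descriptor_pb2
+    from google.protobuf import proto_builder
+
+    proto_cls = proto_builder.MakeSimpleProtoClass(
+        {'foo': descriptor_pb2.FieldDescriptorProto.TYPE_INT64,
+         'bar': descriptor_pb2.FieldDescriptorProto.TYPE_STRING})
+    msg = proto_cls(foo=42, bar='hello')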
+ """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. + factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + # # If the number falls in the reserved range, reassign it to the correct + # # number after the range. + if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: + f_number += ( + descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - + descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py new file mode 100644 index 0000000000..fc8eb32d79 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. +""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py new file mode 100644 index 0000000000..2c6ecf4c98 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/pyext/python.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestAllExtensions.RegisterExtension(optional_nested_message_extension) + TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\001' + _TESTALLTYPES._serialized_start=72 + _TESTALLTYPES._serialized_end=388 + _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 + _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 + _FOREIGNMESSAGE._serialized_start=390 + _FOREIGNMESSAGE._serialized_end=428 + _TESTALLEXTENSIONS._serialized_start=430 + _TESTALLEXTENSIONS._serialized_end=459 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/reflection.py b/openpype/hosts/hiero/vendor/google/protobuf/reflection.py new file mode 100644 index 0000000000..81e18859a8 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/reflection.py @@ -0,0 +1,95 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf import message_factory +from google.protobuf import symbol_database + +# The type of all Message classes. +# Part of the public interface, but normally only used by message factories. +GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE + +MESSAGE_CLASS_CACHE = {} + + +# Deprecated. Please NEVER use reflection.ParseMessage(). +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). + Please use MessageFactory.GetPrototype() instead. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +# Deprecated. Please NEVER use reflection.MakeClass(). +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + DEPRECATED: use MessageFactory.GetPrototype() instead. + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + # Original implementation leads to duplicate message classes, which won't play + # well with extensions. Message factory info is also missing. + # Redirect to message_factory. + return symbol_database.Default().GetPrototype(descriptor) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/service.py b/openpype/hosts/hiero/vendor/google/protobuf/service.py new file mode 100644 index 0000000000..5625246324 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/service.py @@ -0,0 +1,228 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. 
For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. + + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. + + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. 
+ + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py b/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py new file mode 100644 index 0000000000..f82ab7145a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py @@ -0,0 +1,295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example:: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + # ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. + + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + cls.DESCRIPTOR = descriptor + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. 
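+    # (The subclass then simply inherits the stub methods that were already
+    # built on its base class.)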
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. + + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(builder, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + # Making sure to use exact argument names from the abstract interface in + # service.py to match the type signature + def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): + return builder._CallMethod(self, method_descriptor, rpc_controller, + request, done) + + def _WrapGetRequestClass(self, method_descriptor): + return builder._GetRequestClass(method_descriptor) + + def _WrapGetResponseClass(self, method_descriptor): + return builder._GetResponseClass(method_descriptor) + + builder.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: builder.descriptor) + cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' + cls.GetRequestClass = _WrapGetRequestClass + cls.GetResponseClass = _WrapGetResponseClass + for method in builder.descriptor.methods: + setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. 
+ + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. + callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). + """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py new file mode 100644 index 0000000000..30cca2e06e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/source_context.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SOURCECONTEXT._serialized_start=57 + _SOURCECONTEXT._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py new file mode 100644 index 0000000000..149728ca08 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _STRUCT_FIELDSENTRY._options = None + _STRUCT_FIELDSENTRY._serialized_options = b'8\001' + _NULLVALUE._serialized_start=474 + _NULLVALUE._serialized_end=501 + _STRUCT._serialized_start=50 + _STRUCT._serialized_end=182 + _STRUCT_FIELDSENTRY._serialized_start=113 + _STRUCT_FIELDSENTRY._serialized_end=182 + _VALUE._serialized_start=185 + _VALUE._serialized_end=419 + _LISTVALUE._serialized_start=421 + _LISTVALUE._serialized_end=472 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py b/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py new file mode 100644 index 0000000000..fdcf8cf06c --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase is the MessageFactory for messages generated at compile time, +and makes it easy to create new instances of a registered type, given only the +type's protocol buffer symbol name. + +Example usage:: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. 
+  db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
+  db.RegisterMessage(my_proto_pb2.MyMessage)
+  db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
+
+  # The database can be used as a MessageFactory, to generate types based on
+  # their name:
+  types = db.GetMessages(['my_proto.proto'])
+  my_message_instance = types['MyMessage']()
+
+  # The database's underlying descriptor pool can be queried, so it's not
+  # necessary to know a type's filename to be able to generate it:
+  filename = db.pool.FindFileContainingSymbol('MyMessage')
+  my_message_instance = db.GetMessages([filename])['MyMessage']()
+
+  # This functionality is also provided directly via a convenience method:
+  my_message_instance = db.GetSymbol('MyMessage')()
+"""
+
+
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message_factory
+
+
+class SymbolDatabase(message_factory.MessageFactory):
+  """A database of Python generated symbols."""
+
+  def RegisterMessage(self, message):
+    """Registers the given message type in the local database.
+
+    Calls to GetSymbol() and GetMessages() will return messages registered here.
+
+    Args:
+      message: A :class:`google.protobuf.message.Message` subclass (or
+        instance); its descriptor will be registered.
+
+    Returns:
+      The provided message.
+    """
+
+    desc = message.DESCRIPTOR
+    self._classes[desc] = message
+    self.RegisterMessageDescriptor(desc)
+    return message
+
+  def RegisterMessageDescriptor(self, message_descriptor):
+    """Registers the given message descriptor in the local database.
+
+    Args:
+      message_descriptor (Descriptor): the message descriptor to add.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._AddDescriptor(message_descriptor)
+
+  def RegisterEnumDescriptor(self, enum_descriptor):
+    """Registers the given enum descriptor in the local database.
+
+    Args:
+      enum_descriptor (EnumDescriptor): The enum descriptor to register.
+
+    Returns:
+      EnumDescriptor: The provided descriptor.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._AddEnumDescriptor(enum_descriptor)
+    return enum_descriptor
+
+  def RegisterServiceDescriptor(self, service_descriptor):
+    """Registers the given service descriptor in the local database.
+
+    Args:
+      service_descriptor (ServiceDescriptor): the service descriptor to
+        register.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._AddServiceDescriptor(service_descriptor)
+
+  def RegisterFileDescriptor(self, file_descriptor):
+    """Registers the given file descriptor in the local database.
+
+    Args:
+      file_descriptor (FileDescriptor): The file descriptor to register.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._InternalAddFileDescriptor(file_descriptor)
+
+  def GetSymbol(self, symbol):
+    """Tries to find a symbol in the local database.
+
+    Currently, this method only returns message.Message instances; however, it
+    may be extended in the future to support other symbol types.
+
+    Args:
+      symbol (str): a protocol buffer symbol.
+
+    Returns:
+      A Python class corresponding to the symbol.
+
+    Raises:
+      KeyError: if the symbol could not be found.
+    """
+
+    return self._classes[self.pool.FindMessageTypeByName(symbol)]
+
+  def GetMessages(self, files):
+    # TODO(amauryfa): Fix the differences with MessageFactory.
+    """Gets all registered messages from a specified file.
+ + Only messages already created and registered will be returned; (this is the + case for imported _pb2 modules) + But unlike MessageFactory, this version also returns already defined nested + messages, but does not register any message extensions. + + Args: + files (list[str]): The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. + + Raises: + KeyError: if a file could not be found. + """ + + def _GetAllMessages(desc): + """Walk a message Descriptor and recursively yields all message names.""" + yield desc + for msg_desc in desc.nested_types: + for nested_desc in _GetAllMessages(msg_desc): + yield nested_desc + + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for msg_desc in file_desc.message_types_by_name.values(): + for desc in _GetAllMessages(msg_desc): + try: + result[desc.full_name] = self._classes[desc] + except KeyError: + # This descriptor has no registered class, skip it. + pass + return result + + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py b/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py new file mode 100644 index 0000000000..759cf11f62 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py @@ -0,0 +1,110 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
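As an aside for reviewers, a minimal sketch of how the `SymbolDatabase` vendored above is typically used, shown with the well-known `Timestamp` type so no custom `.proto` is needed (assumes `google.protobuf` resolves to this vendored copy or a stock install):

```python
from google.protobuf import symbol_database
from google.protobuf import timestamp_pb2  # importing registers Timestamp

db = symbol_database.Default()

# GetSymbol() maps a fully-qualified proto name to the generated class.
ts_cls = db.GetSymbol('google.protobuf.Timestamp')

# GetMessages() keys every message in the given files by full name.
types = db.GetMessages(['google/protobuf/timestamp.proto'])
ts = types['google.protobuf.Timestamp'](seconds=60)
print(ts_cls is timestamp_pb2.Timestamp, ts.seconds)  # True 60
```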
+ +"""Encoding related utilities.""" +import re + +_cescape_chr_to_symbol_map = {} +_cescape_chr_to_symbol_map[9] = r'\t' # optional escape +_cescape_chr_to_symbol_map[10] = r'\n' # optional escape +_cescape_chr_to_symbol_map[13] = r'\r' # optional escape +_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape +_cescape_chr_to_symbol_map[39] = r"\'" # optional escape +_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape + +# Lookup table for unicode +_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_unicode_to_str[byte] = string + +# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + + [chr(i) for i in range(32, 127)] + + [r'\%03o' % i for i in range(127, 256)]) +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_byte_to_str[byte] = string +del byte, string + + +def CEscape(text, as_utf8): + # type: (...) -> str + """Escape a bytes string for use in an text protocol buffer. + + Args: + text: A byte string to be escaped. + as_utf8: Specifies if result may contain non-ASCII characters. + In Python 3 this allows unescaped non-ASCII Unicode characters. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + Returns: + Escaped string (str). + """ + # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not + # satisfy our needs; they encodes unprintable characters using two-digit hex + # escapes whereas our C++ unescaping function allows hex escapes to be any + # length. So, "\0011".encode('string_escape') ends up being "\\x011", which + # will be decoded in C++ as a single-character string with char code 0x11. + text_is_unicode = isinstance(text, str) + if as_utf8 and text_is_unicode: + # We're already unicode, no processing beyond control char escapes. + return text.translate(_cescape_chr_to_symbol_map) + ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. + if as_utf8: + return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) + return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) + + +_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') + + +def CUnescape(text): + # type: (str) -> bytes + """Unescape a text string with C-style escape sequences to UTF-8 bytes. + + Args: + text: The data to parse in a str. + Returns: + A byte string. + """ + + def ReplaceHex(m): + # Only replace the match if the number of leading back slashes is odd. i.e. + # the slash itself is not escaped. + if len(m.group(1)) & 1: + return m.group(1) + 'x0' + m.group(2) + return m.group(0) + + # This is required because the 'string_escape' encoding doesn't + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + + return (result.encode('utf-8') # Make it bytes to allow decode. + .decode('unicode_escape') + # Make it bytes again to return the proper type. + .encode('raw_unicode_escape')) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/text_format.py b/openpype/hosts/hiero/vendor/google/protobuf/text_format.py new file mode 100644 index 0000000000..412385c26f --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/text_format.py @@ -0,0 +1,1795 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. + +Simple usage example:: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +# TODO(b/129989314) Import thread contention leads to test failures. 
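The round trip promised by the module docstring above, sketched with the well-known `Timestamp` type:

```python
from google.protobuf import text_format
from google.protobuf import timestamp_pb2

msg = timestamp_pb2.Timestamp(seconds=120, nanos=500)
text_proto = text_format.MessageToString(msg)
print(text_proto, end='')  # seconds: 120 / nanos: 500

parsed = text_format.Parse(text_proto, timestamp_pb2.Timestamp())
assert parsed == msg
```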
+import encodings.raw_unicode_escape # pylint: disable=unused-import +import encodings.unicode_escape # pylint: disable=unused-import +import io +import math +import re + +from google.protobuf.internal import decoder +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +# pylint: disable=g-import-not-at-top +__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge', 'MessageToBytes'] + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) +_QUOTES = frozenset(("'", '"')) +_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing or tokenizing error.""" + + def __init__(self, message=None, line=None, column=None): + if message is not None and line is not None: + loc = str(line) + if column is not None: + loc += ':{0}'.format(column) + message = '{0} : {1}'.format(loc, message) + if message is not None: + super(ParseError, self).__init__(message) + else: + super(ParseError, self).__init__() + self._line = line + self._column = column + + def GetLine(self): + return self._line + + def GetColumn(self): + return self._column + + +class TextWriter(object): + + def __init__(self, as_utf8): + self._writer = io.StringIO() + + def write(self, val): + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString( + message, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + indent=0, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + # type: (...) -> str + """Convert protobuf message to text format. + + Double values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using double_format='.15g'. To ensure that converting to text and back to a + proto will result in an identical value, double_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, fields of a proto message will be printed using + the order defined in source code instead of the field number, extensions + will be printed at the end of the message and their relative order is + determined by the extension number. By default, use the field number + order. + float_format (str): If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest float + that has same value in wire will be printed. Also affect double field + if double_format is not set but float_format is set. 
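`ParseError` carries the 1-based line/column positions that the tokenizer fills in on failure; a small sketch of reading them back:

```python
from google.protobuf import text_format, timestamp_pb2

try:
    # seconds is an int64, so a quoted string cannot be consumed here.
    text_format.Parse('seconds: "oops"', timestamp_pb2.Timestamp())
except text_format.ParseError as err:
    print(err.GetLine(), err.GetColumn())  # 1 10
```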
+ double_format (str): If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, use ``str()`` + use_field_number: If True, print field numbers instead of names. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + indent (int): The initial indent level, in terms of spaces, for pretty + print. + message_formatter (function(message, indent, as_one_line) -> unicode|None): + Custom formatter for selected sub-messages (usually based on message + type). Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if the + field is a proto message. + + Returns: + str: A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + printer = _Printer( + out, + indent, + as_utf8, + as_one_line, + use_short_repeated_primitives, + pointy_brackets, + use_index_order, + float_format, + double_format, + use_field_number, + descriptor_pool, + message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def MessageToBytes(message, **kwargs): + # type: (...) -> bytes + """Convert protobuf message to encoded text format. See MessageToString.""" + text = MessageToString(message, **kwargs) + if isinstance(text, bytes): + return text + codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' + return text.encode(codec) + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + printer = _Printer( + out=out, indent=indent, as_utf8=as_utf8, + as_one_line=as_one_line, + use_short_repeated_primitives=use_short_repeated_primitives, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format, + double_format=double_format, + use_field_number=use_field_number, + descriptor_pool=descriptor_pool, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + + +def PrintField(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field name/value pair.""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintField(field, value) + + +def PrintFieldValue(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, 
+ float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field value (not including name).""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintFieldValue(field, value) + + +def _BuildMessageFromTypeName(type_name, descriptor_pool): + """Returns a protobuf message instance. + + Args: + type_name: Fully-qualified protobuf message type name string. + descriptor_pool: DescriptorPool instance. + + Returns: + A Message instance of type matching type_name, or None if the a Descriptor + wasn't found matching type_name. + """ + # pylint: disable=g-import-not-at-top + if descriptor_pool is None: + from google.protobuf import descriptor_pool as pool_mod + descriptor_pool = pool_mod.Default() + from google.protobuf import symbol_database + database = symbol_database.Default() + try: + message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) + except KeyError: + return None + message_type = database.GetPrototype(message_descriptor) + return message_type() + + +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 + + +class _Printer(object): + """Text format printer for protocol message.""" + + def __init__( + self, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Initialize the Printer. + + Double values can be formatted compactly with 15 digits of precision + (which is the most that IEEE 754 "double" can guarantee) using + double_format='.15g'. To ensure that converting to text and back to a proto + will result in an identical value, double_format='.17g' should be used. + + Args: + out: To record the text format result. + indent: The initial indent level for pretty print. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest + float that has same value in wire will be printed. Also affect double + field if double_format is not set but float_format is set. + double_format: If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, str() is used. + use_field_number: If True, print field numbers instead of names. + descriptor_pool: A DescriptorPool used to resolve Any types. 
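In practice `_BuildMessageFromTypeName` is what lets the printer expand a packed `google.protobuf.Any`; a sketch of the resulting output:

```python
from google.protobuf import any_pb2, text_format, timestamp_pb2

any_msg = any_pb2.Any()
any_msg.Pack(timestamp_pb2.Timestamp(seconds=1))
print(text_format.MessageToString(any_msg), end='')
# [type.googleapis.com/google.protobuf.Timestamp] {
#   seconds: 1
# }
```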
+ message_formatter: A function(message, indent, as_one_line): unicode|None + to custom format selected sub-messages (usually based on message type). + Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if + the field is a proto message. + """ + self.out = out + self.indent = indent + self.as_utf8 = as_utf8 + self.as_one_line = as_one_line + self.use_short_repeated_primitives = use_short_repeated_primitives + self.pointy_brackets = pointy_brackets + self.use_index_order = use_index_order + self.float_format = float_format + if double_format is not None: + self.double_format = double_format + else: + self.double_format = float_format + self.use_field_number = use_field_number + self.descriptor_pool = descriptor_pool + self.message_formatter = message_formatter + self.print_unknown_fields = print_unknown_fields + self.force_colon = force_colon + + def _TryPrintAsAnyMessage(self, message): + """Serializes if message is a google.protobuf.Any field.""" + if '/' not in message.type_url: + return False + packed_message = _BuildMessageFromTypeName(message.TypeName(), + self.descriptor_pool) + if packed_message: + packed_message.MergeFromString(message.value) + colon = ':' if self.force_colon else '' + self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) + self._PrintMessageFieldValue(packed_message) + self.out.write(' ' if self.as_one_line else '\n') + return True + else: + return False + + def _TryCustomFormatMessage(self, message): + formatted = self.message_formatter(message, self.indent, self.as_one_line) + if formatted is None: + return False + + out = self.out + out.write(' ' * self.indent) + out.write(formatted) + out.write(' ' if self.as_one_line else '\n') + return True + + def PrintMessage(self, message): + """Convert protobuf message to text format. + + Args: + message: The protocol buffers message. + """ + if self.message_formatter and self._TryCustomFormatMessage(message): + return + if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and + self._TryPrintAsAnyMessage(message)): + return + fields = message.ListFields() + if self.use_index_order: + fields.sort( + key=lambda x: x[0].number if x[0].is_extension else x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entries because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
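A sketch of the `message_formatter` hook documented above: return a replacement string for messages you want to render yourself, or `None` to fall back to the default printer:

```python
from google.protobuf import text_format, timestamp_pb2

def fmt(message, indent, as_one_line):
    if isinstance(message, timestamp_pb2.Timestamp):
        return 'ts<%d.%09d>' % (message.seconds, message.nanos)
    return None  # anything else keeps the default formatting

msg = timestamp_pb2.Timestamp(seconds=3, nanos=7)
print(text_format.MessageToString(msg, message_formatter=fmt), end='')
# ts<3.000000007>
```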
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key]) + self.PrintField(field, entry_submsg) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if (self.use_short_repeated_primitives + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): + self._PrintShortRepeatedPrimitivesValue(field, value) + else: + for element in value: + self.PrintField(field, element) + else: + self.PrintField(field, value) + + if self.print_unknown_fields: + self._PrintUnknownFields(message.UnknownFields()) + + def _PrintUnknownFields(self, unknown_fields): + """Print unknown fields.""" + out = self.out + for field in unknown_fields: + out.write(' ' * self.indent) + out.write(str(field.field_number)) + if field.wire_type == WIRETYPE_START_GROUP: + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(field.data) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: + try: + # If this field is parseable as a Message, it is probably + # an embedded message. + # pylint: disable=protected-access + (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( + memoryview(field.data), 0, len(field.data)) + except Exception: # pylint: disable=broad-except + pos = 0 + + if pos == len(field.data): + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(embedded_unknown_message) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + else: + # A string or bytes field. self.as_utf8 may not work. + out.write(': \"') + out.write(text_encoding.CEscape(field.data, False)) + out.write('\" ' if self.as_one_line else '\"\n') + else: + # varint, fixed32, fixed64 + out.write(': ') + out.write(str(field.data)) + out.write(' ' if self.as_one_line else '\n') + + def _PrintFieldName(self, field): + """Print field name.""" + out = self.out + out.write(' ' * self.indent) + if self.use_field_number: + out.write(str(field.number)) + else: + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if (self.force_colon or + field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): + # The colon is optional in this case, but our cross-language golden files + # don't include it. Here, the colon is only included if force_colon is + # set to True + out.write(':') + + def PrintField(self, field, value): + """Print a single field name/value pair.""" + self._PrintFieldName(field) + self.out.write(' ') + self.PrintFieldValue(field, value) + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintShortRepeatedPrimitivesValue(self, field, value): + """"Prints short repeated primitives value.""" + # Note: this is called only when value has at least one element. 
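The repeated-field branch above is what `use_short_repeated_primitives` toggles; sketched with `FileDescriptorProto`, whose `public_dependency` field is a repeated int32:

```python
from google.protobuf import descriptor_pb2, text_format

fd = descriptor_pb2.FileDescriptorProto(name='x.proto')
fd.public_dependency.extend([0, 1, 2])
print(text_format.MessageToString(fd, use_short_repeated_primitives=True), end='')
# name: "x.proto"
# public_dependency: [0, 1, 2]
```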
+ self._PrintFieldName(field) + self.out.write(' [') + for i in range(len(value) - 1): + self.PrintFieldValue(field, value[i]) + self.out.write(', ') + self.PrintFieldValue(field, value[-1]) + self.out.write(']') + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintMessageFieldValue(self, value): + if self.pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if self.as_one_line: + self.out.write('%s ' % openb) + self.PrintMessage(value) + self.out.write(closeb) + else: + self.out.write('%s\n' % openb) + self.indent += 2 + self.PrintMessage(value) + self.indent -= 2 + self.out.write(' ' * self.indent + closeb) + + def PrintFieldValue(self, field, value): + """Print a single field value (not including name). + + For repeated fields, the value should be a single element. + + Args: + field: The descriptor of the field to be printed. + value: The value of the field. + """ + out = self.out + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self._PrintMessageFieldValue(value) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, str) and not self.as_utf8: + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We always need to escape all binary data in TYPE_BYTES fields. + out_as_utf8 = False + else: + out_as_utf8 = self.as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + if self.float_format is not None: + out.write('{1:{0}}'.format(self.float_format, value)) + else: + if math.isnan(value): + out.write(str(value)) + else: + out.write(str(type_checkers.ToShortestFloat(value))) + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and + self.double_format is not None): + out.write('{1:{0}}'.format(self.double_format, value)) + else: + out.write(str(value)) + + +def Parse(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + NOTE: for historical reasons this function does not clear the input + message. This is different from what the binary msg.ParseFrom(...) does. + If text contains a field already set in message, the value is appended if the + field is repeated. Otherwise, an error is raised. + + Example:: + + a = MyProto() + a.repeated_field.append('test') + b = MyProto() + + # Repeated fields are combined + text_format.Parse(repr(a), b) + text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] + + # Non-repeated fields cannot be overwritten + a.singular_field = 1 + b.singular_field = 2 + text_format.Parse(repr(a), b) # ParseError + + # Binary version: + b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" + + Caller is responsible for clearing the message as needed. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def Merge(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + Like Parse(), but allows repeated values for a non-repeated field, and uses + the last one. This means any non-repeated, top-level fields specified in text + replace those in the message. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return MergeLines( + text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def ParseLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Parse() for caveats. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.ParseLines(lines, message) + + +def MergeLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Merge() for more details. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. 
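The practical difference between `Parse` and `Merge`, sketched on a singular field that appears twice (for proto3, presence is checked best-effort against default values, as `_MergeScalarField` notes later):

```python
from google.protobuf import text_format, timestamp_pb2

text = 'seconds: 1\nseconds: 2'

merged = text_format.Merge(text, timestamp_pb2.Timestamp())
print(merged.seconds)  # 2 -- Merge keeps the last value

try:
    text_format.Parse(text, timestamp_pb2.Timestamp())
except text_format.ParseError as err:
    print(err)  # ...should not have multiple "seconds" fields.
```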
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.MergeLines(lines, message) + + +class _Parser(object): + """Text format parser for protocol message.""" + + def __init__(self, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + self.allow_unknown_extension = allow_unknown_extension + self.allow_field_number = allow_field_number + self.descriptor_pool = descriptor_pool + self.allow_unknown_field = allow_unknown_field + + def ParseLines(self, lines, message): + """Parses a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = False + self._ParseOrMerge(lines, message) + return message + + def MergeLines(self, lines, message): + """Merges a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = True + self._ParseOrMerge(lines, message) + return message + + def _ParseOrMerge(self, lines, message): + """Converts a text representation of a protocol message into a message. + + Args: + lines: Lines of a message's text representation. + message: A protocol buffer message to merge into. + + Raises: + ParseError: On text parsing problems. + """ + # Tokenize expects native str lines. + str_lines = ( + line if isinstance(line, str) else line.decode('utf-8') + for line in lines) + tokenizer = Tokenizer(str_lines) + while not tokenizer.AtEnd(): + self._MergeField(tokenizer, message) + + def _MergeField(self, tokenizer, message): + """Merges a single protocol message field into a message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + message: A protocol message to record the data. + + Raises: + ParseError: In case of text parsing problems. + """ + message_descriptor = message.DESCRIPTOR + if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and + tokenizer.TryConsume('[')): + type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) + tokenizer.Consume(']') + tokenizer.TryConsume(':') + if tokenizer.TryConsume('<'): + expanded_any_end_token = '>' + else: + tokenizer.Consume('{') + expanded_any_end_token = '}' + expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, + self.descriptor_pool) + if not expanded_any_sub_message: + raise ParseError('Type %s not found in descriptor pool' % + packed_type_name) + while not tokenizer.TryConsume(expanded_any_end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% + (expanded_any_end_token,)) + self._MergeField(tokenizer, expanded_any_sub_message) + deterministic = False + + message.Pack(expanded_any_sub_message, + type_url_prefix=type_url_prefix, + deterministic=deterministic) + return + + if tokenizer.TryConsume('['): + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + name = '.'.join(name) + + if not message_descriptor.is_extendable: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" does not have extensions.' % + message_descriptor.full_name) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(name) + # pylint: enable=protected-access + + + if not field: + if self.allow_unknown_extension: + field = None + else: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" not registered. ' + 'Did you import the _pb2 module which defines it? ' + 'If you are trying to place the extension in the MessageSet ' + 'field of another message that is in an Any or MessageSet field, ' + 'that message\'s _pb2 module must be imported as well' % name) + elif message_descriptor != field.containing_type: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" does not extend message type "%s".' % + (name, message_descriptor.full_name)) + + tokenizer.Consume(']') + + else: + name = tokenizer.ConsumeIdentifierOrNumber() + if self.allow_field_number and name.isdigit(): + number = ParseInteger(name, True, True) + field = message_descriptor.fields_by_number.get(number, None) + if not field and message_descriptor.is_extendable: + field = message.Extensions._FindExtensionByNumber(number) + else: + field = message_descriptor.fields_by_name.get(name, None) + + # Group names are expected to be capitalized as they appear in the + # .proto file, which actually matches their type names, not their field + # names. + if not field: + field = message_descriptor.fields_by_name.get(name.lower(), None) + if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: + field = None + + if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and + field.message_type.name != name): + field = None + + if not field and not self.allow_unknown_field: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" has no field named "%s".' % + (message_descriptor.full_name, name)) + + if field: + if not self._allow_multiple_scalars and field.containing_oneof: + # Check if there's a different field set in this oneof. + # Note that we ignore the case if the same field was set before, and we + # apply _allow_multiple_scalars to non-scalar fields as well. + which_oneof = message.WhichOneof(field.containing_oneof.name) + if which_oneof is not None and which_oneof != field.name: + raise tokenizer.ParseErrorPreviousToken( + 'Field "%s" is specified along with field "%s", another member ' + 'of oneof "%s" for message type "%s".' % + (field.name, which_oneof, field.containing_oneof.name, + message_descriptor.full_name)) + + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + tokenizer.TryConsume(':') + merger = self._MergeMessageField + else: + tokenizer.Consume(':') + merger = self._MergeScalarField + + if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and + tokenizer.TryConsume('[')): + # Short repeated format, e.g. 
"foo: [1, 2, 3]" + if not tokenizer.TryConsume(']'): + while True: + merger(tokenizer, message, field) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + + else: + merger(tokenizer, message, field) + + else: # Proto field is unknown. + assert (self.allow_unknown_extension or self.allow_unknown_field) + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + + def _ConsumeAnyTypeUrl(self, tokenizer): + """Consumes a google.protobuf.Any type URL and returns the type name.""" + # Consume "type.googleapis.com/". + prefix = [tokenizer.ConsumeIdentifier()] + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('/') + # Consume the fully-qualified type name. + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + return '.'.join(prefix), '.'.join(name) + + def _MergeMessageField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: The message of which field is a member. + field: The descriptor of the field to be merged. + + Raises: + ParseError: In case of text parsing problems. + """ + is_map_entry = _IsMapEntry(field) + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = getattr(message, field.name).GetEntryClass()() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + sub_message = message.Extensions[field] + else: + # Also apply _allow_multiple_scalars to message field. + # TODO(jieluo): Change to _allow_singular_overwrites. + if (not self._allow_multiple_scalars and + message.HasField(field.name)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) + self._MergeField(tokenizer, sub_message) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.CopyFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + + @staticmethod + def _IsProto3Syntax(message): + message_descriptor = message.DESCRIPTOR + return (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3') + + def _MergeScalarField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. 
+ + Raises: + ParseError: In case of text parsing problems. + RuntimeError: On runtime errors. + """ + _ = self.allow_unknown_extension + value = None + + if field.type in (descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_SFIXED32): + value = _ConsumeInt32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_SINT64, + descriptor.FieldDescriptor.TYPE_SFIXED64): + value = _ConsumeInt64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_FIXED32): + value = _ConsumeUint32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_FIXED64): + value = _ConsumeUint64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, + descriptor.FieldDescriptor.TYPE_DOUBLE): + value = tokenizer.ConsumeFloat() + elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: + value = tokenizer.ConsumeBool() + elif field.type == descriptor.FieldDescriptor.TYPE_STRING: + value = tokenizer.ConsumeString() + elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: + value = tokenizer.ConsumeByteString() + elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: + value = tokenizer.ConsumeEnum(field) + else: + raise RuntimeError('Unknown field type %d' % field.type) + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + message.Extensions[field].append(value) + else: + getattr(message, field.name).append(value) + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + not self._IsProto3Syntax(message) and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + else: + message.Extensions[field] = value + else: + duplicate_error = False + if not self._allow_multiple_scalars: + if self._IsProto3Syntax(message): + # Proto3 doesn't represent presence so we try best effort to check + # multiple scalars by compare to default values. + duplicate_error = bool(getattr(message, field.name)) + else: + duplicate_error = message.HasField(field.name) + + if duplicate_error: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + else: + setattr(message, field.name, value) + + +def _SkipFieldContents(tokenizer): + """Skips over contents (value or message) of a field. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + # Try to guess the type of this field. + # If this field is not a message, there should be a ":" between the + # field name and the field value and also the field value should not + # start with "{" or "<" which indicates the beginning of a message body. + # If there is no ":" or there is a "{" or "<" after ":", this field has + # to be a message or the input is ill-formed. + if tokenizer.TryConsume(':') and not tokenizer.LookingAt( + '{') and not tokenizer.LookingAt('<'): + _SkipFieldValue(tokenizer) + else: + _SkipFieldMessage(tokenizer) + + +def _SkipField(tokenizer): + """Skips over a complete field (name and value/message). + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + if tokenizer.TryConsume('['): + # Consume extension name. 
+ tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifierOrNumber() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. + """ + # String/bytes tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeByteString(): + while tokenizer.TryConsumeByteString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +class Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile(r'\s+') + _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) + _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + # Avoid backtracking! https://stackoverflow.com/a/844267 + r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) + for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'[^\d\W]\w*') + _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') + + def __init__(self, lines, skip_comments=True): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._skip_comments = skip_comments + self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT + or self._WHITESPACE) + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._whitespace_pattern.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. 
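The `Tokenizer` can also be driven by hand; a sketch showing how a one-line text proto decomposes into tokens (comments are skipped by default):

```python
from google.protobuf import text_format

tok = text_format.Tokenizer(['foo: 42  # trailing comment'])
tokens = []
while not tok.AtEnd():
    tokens.append(tok.token)
    tok.NextToken()
print(tokens)  # ['foo', ':', '42']
```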
+ """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. + + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self.ParseError('Expected "%s".' % token) + + def ConsumeComment(self): + result = self.token + if not self._COMMENT.match(result): + raise self.ParseError('Expected comment.') + self.NextToken() + return result + + def ConsumeCommentOrTrailingComment(self): + """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" + + # Tokenizer initializes _previous_line and _previous_column to 0. As the + # tokenizer starts, it looks like there is a previous token on the line. + just_started = self._line == 0 and self._column == 0 + + before_parsing = self._previous_line + comment = self.ConsumeComment() + + # A trailing comment is a comment on the same line than the previous token. + trailing = (self._previous_line == before_parsing + and not just_started) + + return trailing, comment + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self.ParseError('Expected identifier.') + self.NextToken() + return result + + def TryConsumeIdentifierOrNumber(self): + try: + self.ConsumeIdentifierOrNumber() + return True + except ParseError: + return False + + def ConsumeIdentifierOrNumber(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER_OR_NUMBER.match(result): + raise self.ParseError('Expected identifier or number, got %s.' % result) + self.NextToken() + return result + + def TryConsumeInteger(self): + try: + self.ConsumeInteger() + return True + except ParseError: + return False + + def ConsumeInteger(self): + """Consumes an integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer couldn't be consumed. + """ + try: + result = _ParseAbstractInteger(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeByteString(self): + try: + self.ConsumeByteString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. 
+ """ + the_bytes = self.ConsumeByteString() + try: + return str(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. + """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self.ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self.ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. + """ + return ParseError(message, self._previous_line + 1, + self._previous_column + 1) + + def ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('\'' + self._current_line + '\': ' + message, + self._line + 1, self._column + 1) + + def _StringParseError(self, e): + return self.ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if not match and not self._skip_comments: + match = self._COMMENT.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + +# Aliased so it can still be accessed by current visibility violators. +# TODO(dbarnett): Migrate violators to textformat_tokenizer. +_Tokenizer = Tokenizer # pylint: disable=invalid-name + + +def _ConsumeInt32(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) + + +def _ConsumeUint32(tokenizer): + """Consumes an unsigned 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. 
+ """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) + + +def _TryConsumeInt64(tokenizer): + try: + _ConsumeInt64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeInt64(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) + + +def _TryConsumeUint64(tokenizer): + try: + _ConsumeUint64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeUint64(tokenizer): + """Consumes an unsigned 64bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) + + +def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): + """Consumes an integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer with given characteristics couldn't be consumed. + """ + try: + result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) + except ValueError as e: + raise tokenizer.ParseError(str(e)) + tokenizer.NextToken() + return result + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + result = _ParseAbstractInteger(text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def _ParseAbstractInteger(text): + """Parses an integer without checking size/signedness. + + Args: + text: The text to parse. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + orig_text = text + c_octal_match = re.match(r'(-?)0(\d+)$', text) + if c_octal_match: + # Python 3 no longer supports 0755 octal syntax without the 'o', so + # we always use the '0o' prefix for multi-digit numbers starting with 0. + text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) + try: + return int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % orig_text) + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. + """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. 
+ if _FLOAT_INFINITY.match(text): + if text[0] == '-': + return float('-inf') + else: + return float('inf') + elif _FLOAT_NAN.match(text): + return float('nan') + else: + # assume '1.0f' format + try: + return float(text.rstrip('f')) + except ValueError: + raise ValueError('Couldn\'t parse float: %s' % text) + + +def ParseBool(text): + """Parse a boolean value. + + Args: + text: Text to parse. + + Returns: + The boolean value parsed. + + Raises: + ValueError: If text is not a valid boolean. + """ + if text in ('true', 't', '1', 'True'): + return True + elif text in ('false', 'f', '0', 'False'): + return False + else: + raise ValueError('Expected "true" or "false".') + + +def ParseEnum(field, value): + """Parse an enum value. + + The value can be specified by a number (the enum value), or by + a string literal (the enum name). + + Args: + field: Enum field descriptor. + value: String value. + + Returns: + Enum value number. + + Raises: + ValueError: If the enum value could not be parsed. + """ + enum_descriptor = field.enum_type + try: + number = int(value, 0) + except ValueError: + # Identifier. + enum_value = enum_descriptor.values_by_name.get(value, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value named %s.' % + (enum_descriptor.full_name, value)) + else: + # Numeric value. + if hasattr(field.file, 'syntax'): + # Attribute is checked for compatibility. + if field.file.syntax == 'proto3': + # Proto3 accepts unknown numeric enum values. + return number + enum_value = enum_descriptor.values_by_number.get(number, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value with number %d.' % + (enum_descriptor.full_name, number)) + return enum_value.number diff --git a/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py new file mode 100644 index 0000000000..558d496941 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/timestamp.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _TIMESTAMP._serialized_start=52 + _TIMESTAMP._serialized_end=95 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py new file mode 100644 index 0000000000..19903fb6b4 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SYNTAX._serialized_start=1413 + _SYNTAX._serialized_end=1459 + _TYPE._serialized_start=113 + _TYPE._serialized_end=328 + _FIELD._serialized_start=331 + _FIELD._serialized_end=1056 + _FIELD_KIND._serialized_start=610 + _FIELD_KIND._serialized_end=938 + _FIELD_CARDINALITY._serialized_start=940 + _FIELD_CARDINALITY._serialized_end=1056 + _ENUM._serialized_start=1059 + _ENUM._serialized_end=1265 + _ENUMVALUE._serialized_start=1267 + _ENUMVALUE._serialized_end=1350 + _OPTION._serialized_start=1352 + _OPTION._serialized_end=1411 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py new file mode 100644 index 0000000000..66a5836c82 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) + + DESCRIPTOR._options = None + _TESTBOOLMAP_BOOLMAPENTRY._options = None + _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' + _ENUMVALUE._serialized_start=1607 + _ENUMVALUE._serialized_end=1657 + _TESTFLAGSANDSTRINGS._serialized_start=62 + _TESTFLAGSANDSTRINGS._serialized_end=199 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 + _TESTBASE64BYTEARRAYS._serialized_start=201 + _TESTBASE64BYTEARRAYS._serialized_end=234 + _TESTJAVASCRIPTJSON._serialized_start=236 + _TESTJAVASCRIPTJSON._serialized_end=307 + _TESTJAVASCRIPTORDERJSON1._serialized_start=309 + _TESTJAVASCRIPTORDERJSON1._serialized_end=390 + _TESTJAVASCRIPTORDERJSON2._serialized_start=393 + _TESTJAVASCRIPTORDERJSON2._serialized_end=530 + _TESTLARGEINT._serialized_start=532 + _TESTLARGEINT._serialized_end=568 + _TESTNUMBERS._serialized_start=571 + _TESTNUMBERS._serialized_end=731 + _TESTNUMBERS_MYTYPE._serialized_start=691 + _TESTNUMBERS_MYTYPE._serialized_end=731 + _TESTCAMELCASE._serialized_start=733 + _TESTCAMELCASE._serialized_end=817 + _TESTBOOLMAP._serialized_start=819 + _TESTBOOLMAP._serialized_end=943 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 + _TESTRECURSION._serialized_start=945 + _TESTRECURSION._serialized_end=1024 + _TESTSTRINGMAP._serialized_start=1027 + _TESTSTRINGMAP._serialized_end=1161 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 + _TESTSTRINGSERIALIZER._serialized_start=1164 + _TESTSTRINGSERIALIZER._serialized_end=1360 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 + _TESTMESSAGEWITHEXTENSION._serialized_start=1362 + _TESTMESSAGEWITHEXTENSION._serialized_end=1398 + _TESTEXTENSION._serialized_start=1400 + _TESTEXTENSION._serialized_end=1522 + _TESTDEFAULTENUMVALUE._serialized_start=1524 + _TESTDEFAULTENUMVALUE._serialized_end=1605 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 0000000000..5498deafa9 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format_proto3.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' + _TESTMAP_BOOLMAPENTRY._options = None + _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT32MAPENTRY._options = None + _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT64MAPENTRY._options = None + _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT32MAPENTRY._options = None + _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT64MAPENTRY._options = None + _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_STRINGMAPENTRY._options = None + _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_BOOLMAPENTRY._options = None + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT32MAPENTRY._options = None + _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT64MAPENTRY._options = None + _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_UINT32MAPENTRY._options = None + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' + 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_STRINGMAPENTRY._options = None + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_MAPMAPENTRY._options = None + _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTBOOLVALUE_BOOLMAPENTRY._options = None + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' + _ENUMTYPE._serialized_start=4849 + _ENUMTYPE._serialized_end=4877 + _MESSAGETYPE._serialized_start=277 + _MESSAGETYPE._serialized_end=305 + _TESTMESSAGE._serialized_start=308 + _TESTMESSAGE._serialized_end=968 + _TESTONEOF._serialized_start=971 + _TESTONEOF._serialized_end=1239 + _TESTMAP._serialized_start=1242 + _TESTMAP._serialized_end=1851 + _TESTMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTMAP_INT32MAPENTRY._serialized_start=1605 + _TESTMAP_INT32MAPENTRY._serialized_end=1652 + _TESTMAP_INT64MAPENTRY._serialized_start=1654 + _TESTMAP_INT64MAPENTRY._serialized_end=1701 + _TESTMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP._serialized_start=1854 + _TESTNESTEDMAP._serialized_end=2627 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 + _TESTSTRINGMAP._serialized_start=2629 + _TESTSTRINGMAP._serialized_end=2752 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 + _TESTWRAPPER._serialized_start=2755 + _TESTWRAPPER._serialized_end=3761 + _TESTTIMESTAMP._serialized_start=3763 + _TESTTIMESTAMP._serialized_end=3873 + _TESTDURATION._serialized_start=3875 + _TESTDURATION._serialized_end=3982 + _TESTFIELDMASK._serialized_start=3984 + _TESTFIELDMASK._serialized_end=4042 + _TESTSTRUCT._serialized_start=4044 + _TESTSTRUCT._serialized_end=4145 + _TESTANY._serialized_start=4147 + _TESTANY._serialized_end=4239 + _TESTVALUE._serialized_start=4241 + _TESTVALUE._serialized_end=4339 + _TESTLISTVALUE._serialized_start=4341 + _TESTLISTVALUE._serialized_end=4451 + _TESTBOOLVALUE._serialized_start=4454 + _TESTBOOLVALUE._serialized_end=4591 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 + _TESTCUSTOMJSONNAME._serialized_start=4593 + _TESTCUSTOMJSONNAME._serialized_end=4636 + _TESTEXTENSIONS._serialized_start=4638 + _TESTEXTENSIONS._serialized_end=4712 + _TESTENUMVALUE._serialized_start=4715 + _TESTENUMVALUE._serialized_end=4847 +# @@protoc_insertion_point(module_scope) diff --git 
a/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py new file mode 100644 index 0000000000..e49eb4c15d --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/wrappers.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DOUBLEVALUE._serialized_start=51 + _DOUBLEVALUE._serialized_end=79 + _FLOATVALUE._serialized_start=81 + _FLOATVALUE._serialized_end=108 + _INT64VALUE._serialized_start=110 + _INT64VALUE._serialized_end=137 + _UINT64VALUE._serialized_start=139 + _UINT64VALUE._serialized_end=167 + _INT32VALUE._serialized_start=169 + _INT32VALUE._serialized_end=196 + _UINT32VALUE._serialized_start=198 + _UINT32VALUE._serialized_end=226 + _BOOLVALUE._serialized_start=228 + _BOOLVALUE._serialized_end=254 + _STRINGVALUE._serialized_start=256 + _STRINGVALUE._serialized_end=284 + _BYTESVALUE._serialized_start=286 + _BYTESVALUE._serialized_end=313 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/houdini/__init__.py b/openpype/hosts/houdini/__init__.py index a3ee38db8d..38bf1fcc2d 100644 --- a/openpype/hosts/houdini/__init__.py +++ b/openpype/hosts/houdini/__init__.py @@ -1,38 +1,10 @@ -import os +from .addon import ( + HoudiniAddon, + HOUDINI_HOST_DIR, +) -def add_implementation_envs(env, _app): - # Add requirements to HOUDINI_PATH and HOUDINI_MENU_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - - startup_path = os.path.join( - pype_root, "openpype", "hosts", "houdini", "startup" - ) - new_houdini_path = [startup_path] - new_houdini_menu_path = [startup_path] - - old_houdini_path = env.get("HOUDINI_PATH") or "" - old_houdini_menu_path = env.get("HOUDINI_MENU_PATH") or "" - - for path in old_houdini_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if 
norm_path not in new_houdini_path: - new_houdini_path.append(norm_path) - - for path in old_houdini_menu_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_houdini_menu_path: - new_houdini_menu_path.append(norm_path) - - # Add ampersand for unknown reason (Maybe is needed in Houdini?) - new_houdini_path.append("&") - new_houdini_menu_path.append("&") - - env["HOUDINI_PATH"] = os.pathsep.join(new_houdini_path) - env["HOUDINI_MENU_PATH"] = os.pathsep.join(new_houdini_menu_path) +__all__ = ( + "HoudiniAddon", + "HOUDINI_HOST_DIR", +) diff --git a/openpype/hosts/houdini/addon.py b/openpype/hosts/houdini/addon.py new file mode 100644 index 0000000000..8d88e83c56 --- /dev/null +++ b/openpype/hosts/houdini/addon.py @@ -0,0 +1,55 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HoudiniAddon(OpenPypeModule, IHostAddon): + name = "houdini" + host_name = "houdini" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to HOUDINI_PATH and HOUDINI_MENU_PATH + startup_path = os.path.join(HOUDINI_HOST_DIR, "startup") + new_houdini_path = [startup_path] + new_houdini_menu_path = [startup_path] + + old_houdini_path = env.get("HOUDINI_PATH") or "" + old_houdini_menu_path = env.get("HOUDINI_MENU_PATH") or "" + + for path in old_houdini_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_houdini_path: + new_houdini_path.append(norm_path) + + for path in old_houdini_menu_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_houdini_menu_path: + new_houdini_menu_path.append(norm_path) + + # Add ampersand for unknown reason (Maybe is needed in Houdini?) 
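+ # (A likely explanation: in Houdini search-path variables "&" expands to + # the default path, so the built-in entries stay reachable after ours.)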
+ new_houdini_path.append("&") + new_houdini_menu_path.append("&") + + env["HOUDINI_PATH"] = os.pathsep.join(new_houdini_path) + env["HOUDINI_MENU_PATH"] = os.pathsep.join(new_houdini_menu_path) + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(HOUDINI_HOST_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b5f5459392..e4af1913ef 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -13,8 +13,8 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -import openpype.hosts.houdini -from openpype.hosts.houdini.api import lib +from openpype.hosts.houdini import HOUDINI_HOST_DIR +from openpype.hosts.houdini.api import lib, shelves from openpype.lib import ( register_event_callback, @@ -28,8 +28,7 @@ log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" IS_HEADLESS = not hasattr(hou, "ui") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.houdini.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -66,7 +65,7 @@ def install(): self._has_been_setup = True # add houdini vendor packages - hou_pythonpath = os.path.join(os.path.dirname(HOST_DIR), "vendor") + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") sys.path.append(hou_pythonpath) @@ -74,6 +73,7 @@ def install(): # so it initializes into the correct scene FPS, Frame Range, etc. # todo: make sure this doesn't trigger when opening with last workfile _set_context_settings() + shelves.generate_shelves() def uninstall(): diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py new file mode 100644 index 0000000000..3ccab964cd --- /dev/null +++ b/openpype/hosts/houdini/api/shelves.py @@ -0,0 +1,182 @@ +import os +import logging +import platform + +from openpype.settings import get_project_settings + +import hou + +log = logging.getLogger("openpype.hosts.houdini.shelves") + + +def generate_shelves(): + """This function generates complete shelves from shelf set to tools + in Houdini from openpype project settings houdini shelf definition. 
+ """ + current_os = platform.system().lower() + + # load configuration of houdini shelves + project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + shelves_set_config = project_settings["houdini"]["shelves"] + + if not shelves_set_config: + log.debug("No custom shelves found in project settings.") + return + + for shelf_set_config in shelves_set_config: + shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') + shelf_set_os_filepath = shelf_set_filepath[current_os] + if shelf_set_os_filepath: + if not os.path.isfile(shelf_set_os_filepath): + log.error("Shelf path doesn't exist - " + "{}".format(shelf_set_os_filepath)) + continue + + hou.shelves.newShelfSet(file_path=shelf_set_os_filepath) + continue + + shelf_set_name = shelf_set_config.get('shelf_set_name') + if not shelf_set_name: + log.warning("No name found in shelf set definition.") + continue + + shelves_definition = shelf_set_config.get('shelf_definition') + if not shelves_definition: + log.debug( + "No shelf definition found for shelf set named '{}'".format( + shelf_set_name + ) + ) + continue + + shelf_set = get_or_create_shelf_set(shelf_set_name) + for shelf_definition in shelves_definition: + shelf_name = shelf_definition.get('shelf_name') + if not shelf_name: + log.warning("No name found in shelf definition.") + continue + + shelf = get_or_create_shelf(shelf_name) + + if not shelf_definition.get('tools_list'): + log.debug( + "No tool definition found for shelf named {}".format( + shelf_name + ) + ) + continue + + mandatory_attributes = {'name', 'script'} + for tool_definition in shelf_definition.get('tools_list'): + # We verify that the name and script attibutes of the tool + # are set + if not all( + tool_definition[key] for key in mandatory_attributes + ): + log.warning( + "You need to specify at least the name and the " + "script path of the tool.") + continue + + tool = get_or_create_tool(tool_definition, shelf) + + if not tool: + continue + + # Add the tool to the shelf if not already in it + if tool not in shelf.tools(): + shelf.setTools(list(shelf.tools()) + [tool]) + + # Add the shelf in the shelf set if not already in it + if shelf not in shelf_set.shelves(): + shelf_set.setShelves(shelf_set.shelves() + (shelf,)) + + +def get_or_create_shelf_set(shelf_set_label): + """This function verifies if the shelf set label exists. If not, + creates a new shelf set. + + Arguments: + shelf_set_label (str): The label of the shelf set + + Returns: + hou.ShelfSet: The shelf set existing or the new one + """ + all_shelves_sets = hou.shelves.shelfSets().values() + + shelf_set = next((shelf for shelf in all_shelves_sets if + shelf.label() == shelf_set_label), None) + if shelf_set: + return shelf_set + + shelf_set_name = shelf_set_label.replace(' ', '_').lower() + new_shelf_set = hou.shelves.newShelfSet( + name=shelf_set_name, + label=shelf_set_label + ) + return new_shelf_set + + +def get_or_create_shelf(shelf_label): + """This function verifies if the shelf label exists. If not, creates + a new shelf. 
+ + Arguments: + shelf_label (str): The label of the shelf + + Returns: + hou.Shelf: The existing shelf, or the newly created one + """ + all_shelves = hou.shelves.shelves().values() + + shelf = next((s for s in all_shelves if s.label() == shelf_label), None) + if shelf: + return shelf + + shelf_name = shelf_label.replace(' ', '_').lower() + new_shelf = hou.shelves.newShelf( + name=shelf_name, + label=shelf_label + ) + return new_shelf + + +def get_or_create_tool(tool_definition, shelf): + """This function verifies if the tool exists and updates it. If not, it + creates a new one. + + Arguments: + tool_definition (dict): Dict with label, script, icon and help + shelf (hou.Shelf): The parent shelf of the tool + + Returns: + hou.Tool: The updated tool, or the newly created one + """ + existing_tools = shelf.tools() + tool_label = tool_definition.get('label') + + if not os.path.exists(tool_definition['script']): + log.warning( + "This path doesn't exist - {}".format(tool_definition['script']) + ) + return + + # Read the script file before updating or creating the tool so that an + # existing tool also gets the script contents, not the file path. + with open(tool_definition['script']) as f: + script = f.read() + tool_definition.update({'script': script}) + + existing_tool = next( + (tool for tool in existing_tools if tool.label() == tool_label), + None + ) + if existing_tool: + tool_definition.pop('name', None) + tool_definition.pop('label', None) + existing_tool.setData(**tool_definition) + return existing_tool + + tool_name = tool_label.replace(' ', '_').lower() + + new_tool = hou.shelves.newTool(name=tool_name, **tool_definition) + + return new_tool diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py index e0213023fd..5f7efff333 100644 --- a/openpype/hosts/houdini/api/workio.py +++ b/openpype/hosts/houdini/api/workio.py @@ -2,11 +2,10 @@ import os import hou -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["houdini"] + return [".hip", ".hiplc", ".hipnc"] def has_unsaved_changes(): diff --git a/openpype/hosts/houdini/hooks/set_paths.py b/openpype/hosts/houdini/hooks/set_paths.py index cd2f98fb76..04a33b1643 100644 --- a/openpype/hosts/houdini/hooks/set_paths.py +++ b/openpype/hosts/houdini/hooks/set_paths.py @@ -1,5 +1,4 @@ from openpype.lib import PreLaunchHook -import os class SetPath(PreLaunchHook): @@ -15,4 +14,4 @@ class SetPath(PreLaunchHook): self.log.warning("BUG: Workdir is not filled.") return - os.chdir(workdir) + self.launch_context.kwargs["cwd"] = workdir diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py index 928c2ee734..c78798e58a 100644 --- a/openpype/hosts/houdini/plugins/load/load_image.py +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -73,7 +73,7 @@ class ImageLoader(load.LoaderPlugin): # Imprint it manually data = { - "schema": "avalon-core:container-2.0", + "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py index 48580fc3aa..2e5079925b 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_layer.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py @@ -43,7 +43,7 @@ class USDSublayerLoader(load.LoaderPlugin): # Imprint it manually data = { - "schema": "avalon-core:container-2.0", + "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py
b/openpype/hosts/houdini/plugins/load/load_usd_reference.py index 6851c77e6d..c4371db39b 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_reference.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py @@ -43,7 +43,7 @@ class USDReferenceLoader(load.LoaderPlugin): # Imprint it manually data = { - "schema": "avalon-core:container-2.0", + "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index c0b987ebbc..1383c274a2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -1,27 +1,28 @@ import os import hou +from openpype.pipeline import legacy_io import pyblish.api class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" - order = pyblish.api.CollectorOrder - 0.5 + order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] def process(self, context): """Inject the current working file""" - filepath = hou.hipFile.path() - if not os.path.exists(filepath): + current_file = hou.hipFile.path() + if not os.path.exists(current_file): # By default Houdini will even point a new scene to a path. # However if the file is not saved at all and does not exist, # we assume the user never set it. - filepath = "" + current_file = "" - elif os.path.basename(filepath) == "untitled.hip": + elif os.path.basename(current_file) == "untitled.hip": # Due to even a new file being called 'untitled.hip' we are unable # to confirm the current scene was ever saved because the file # could have existed already. We will allow it if the file exists, @@ -33,4 +34,43 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): "saved correctly."
) - context.data["currentFile"] = filepath + context.data["currentFile"] = current_file + + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = 'workfile' + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": 'workfile', + "families": ['workfile'], + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + }) + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('Scene path: {}'.format(current_file)) + self.log.info('staging Dir: {}'.format(folder)) + self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/houdini/plugins/publish/collect_inputs.py b/openpype/hosts/houdini/plugins/publish/collect_inputs.py index 8c7098c710..9ee0248bd9 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_inputs.py +++ b/openpype/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,3 +1,5 @@ +from bson.objectid import ObjectId + import pyblish.api from openpype.pipeline import registered_host @@ -115,7 +117,7 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): # Collect containers for the given set of nodes containers = collect_input_containers(nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py index c635a53074..d56d389be0 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api import hou +from openpype.pipeline.publish import RepairAction from openpype.hosts.houdini.api import lib @@ -13,7 +13,7 @@ class CollectRemotePublishSettings(pyblish.api.ContextPlugin): hosts = ["houdini"] targets = ["deadline"] label = "Remote Publish Submission Settings" - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, context): diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 83b790407f..758d4c560b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractAlembic(openpype.api.Extractor): +class ExtractAlembic(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Alembic" diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index e56e40df85..a302b451cb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ 
b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractAss(openpype.api.Extractor): +class ExtractAss(publish.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract Ass" diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index f300b6d28d..23e875f107 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -1,12 +1,12 @@ import os import pyblish.api -import openpype.api +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractComposite(openpype.api.Extractor): +class ExtractComposite(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Composite (Image Sequence)" diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 301dd4e297..7dd03a92b7 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -4,10 +4,11 @@ import os from pprint import pformat import pyblish.api -import openpype.api + +from openpype.pipeline import publish -class ExtractHDA(openpype.api.Extractor): +class ExtractHDA(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract HDA" diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index c754d60c59..ca9be64a47 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractRedshiftProxy(openpype.api.Extractor): +class ExtractRedshiftProxy(publish.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract Redshift Proxy" diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 0fc26900fb..78c32affb4 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractUSD(openpype.api.Extractor): +class ExtractUSD(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract USD" diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 80919c023b..f686f712bb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -5,7 +5,6 @@ import sys from collections import deque import pyblish.api -import openpype.api from openpype.client import ( get_asset_by_name, @@ -16,6 +15,7 @@ from openpype.client import ( from openpype.pipeline import ( get_representation_path, legacy_io, + publish, ) import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.hosts.houdini.api.lib import render_rop @@ -160,7 +160,7 @@ def parm_values(overrides): parm.set(value) 
-class ExtractUSDLayered(openpype.api.Extractor): +class ExtractUSDLayered(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Layered USD" diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 113e1b0bcb..26ec423048 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractVDBCache(openpype.api.Extractor): +class ExtractVDBCache(publish.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract VDB Cache" diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c5cacd1880..c990f481d3 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -1,11 +1,10 @@ import pyblish.api -from openpype.api import version_up -from openpype.action import get_errored_plugins_from_data +from openpype.lib import version_up from openpype.pipeline import registered_host -class IncrementCurrentFile(pyblish.api.InstancePlugin): +class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. Saves the current scene with an increased version number. @@ -15,30 +14,10 @@ class IncrementCurrentFile(pyblish.api.InstancePlugin): label = "Increment current file" order = pyblish.api.IntegratorOrder + 9.0 hosts = ["houdini"] - families = ["colorbleed.usdrender", "redshift_rop"] - targets = ["local"] + families = ["workfile"] + optional = True - def process(self, instance): - - # This should be a ContextPlugin, but this is a workaround - # for a bug in pyblish to run once for a family: issue #250 - context = instance.context - key = "__hasRun{}".format(self.__class__.__name__) - if context.data.get(key, False): - return - else: - context.data[key] = True - - context = instance.context - errored_plugins = get_errored_plugins_from_data(context) - if any( - plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins - ): - raise RuntimeError( - "Skipping incrementing current file because " - "submission to deadline failed." - ) + def process(self, context): # Filename must not have changed since collecting host = registered_host() diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py deleted file mode 100644 index faa015f739..0000000000 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py +++ /dev/null @@ -1,35 +0,0 @@ -import pyblish.api - -import hou -from openpype.api import version_up -from openpype.action import get_errored_plugins_from_data - - -class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): - """Increment the current file. - - Saves the current scene with an increased version number. 
- - """ - - label = "Increment current file" - order = pyblish.api.IntegratorOrder + 9.0 - hosts = ["houdini"] - targets = ["deadline"] - - def process(self, context): - - errored_plugins = get_errored_plugins_from_data(context) - if any( - plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins - ): - raise RuntimeError( - "Skipping incrementing current file because " - "submission to deadline failed." - ) - - current_filepath = context.data["currentFile"] - new_filepath = version_up(current_filepath) - - hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True) diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py index 0ae1bc94eb..ac408bc842 100644 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 3e17d3e8de..ea800707fb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,9 @@ import pyblish.api -import openpype.api from collections import defaultdict +from openpype.pipeline.publish import ValidateContentsOrder + class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """Validate Alembic ROP Primitive to Detail attribute is consistent. 
@@ -15,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index e9126ffef0..cbed3ea235 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,5 +1,6 @@ import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 8d7e3b611f..2625ae5f83 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,5 +1,6 @@ import pyblish.api -import colorbleed.api + +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -11,7 +12,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = colorbleed.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index fc4e18f701..7cf8da69d6 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder - 0.1 + order = ValidateContentsOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index a0919e1323..d414920f8b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,11 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index cd72877949..be6a798a95 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,11 @@ import pyblish.api -import 
openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index f58e5f8d7d..76635d4ed5 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,6 @@ import pyblish.api -import openpype.api import hou +from openpype.pipeline.publish import ValidateContentsOrder def cook_in_range(node, start, end): @@ -28,7 +28,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["houdini"] label = "Validate no errors" diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 1eb36763bb..7a8cd04f15 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Prims Hierarchy Path" diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 95c66edff0..0ab182c584 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api from openpype.hosts.houdini.api import lib +from openpype.pipeline.publish import RepairContextAction import hou @@ -14,7 +14,7 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): hosts = ["houdini"] targets = ["deadline"] label = "Remote Publish ROP node" - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, context): diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index b681fd0ee1..afc8df7528 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api import hou +from openpype.pipeline.publish import RepairContextAction class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -12,7 +12,7 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): hosts = ["houdini"] targets = ["deadline"] label = "Remote Publish ROP enabled" - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, 
context): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index b979b87d84..f08c7c72c5 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -3,14 +3,14 @@ import re import pyblish.api from openpype.client import get_subset_by_name -import openpype.api from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidateContentsOrder class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade model exists" diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index a77ca2f3cb..a4902b48a9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder import hou @@ -12,7 +12,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade Workspace" diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index 0ae1bc94eb..ac408bc842 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 1ba840b71d..55ed581d4c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,6 +1,6 @@ import pyblish.api -import openpype.api import hou +from openpype.pipeline.publish import ValidateContentsOrder class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py new file mode 100644 index 0000000000..560b355e21 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +import pyblish.api +import hou + +from openpype.pipeline.publish import RepairAction + + +class ValidateWorkfilePaths(pyblish.api.InstancePlugin): 
+ """Validate workfile paths so they are absolute.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["houdini"] + label = "Validate Workfile Paths" + actions = [RepairAction] + optional = True + + node_types = ["file", "alembic"] + prohibited_vars = ["$HIP", "$JOB"] + + def process(self, instance): + invalid = self.get_invalid() + self.log.info( + "node types to check: {}".format(", ".join(self.node_types))) + self.log.info( + "prohibited vars: {}".format(", ".join(self.prohibited_vars)) + ) + if invalid: + for param in invalid: + self.log.error( + "{}: {}".format(param.path(), param.unexpandedString())) + + raise RuntimeError("Invalid paths found") + + @classmethod + def get_invalid(cls): + invalid = [] + for param, _ in hou.fileReferences(): + if param is None: + continue + + # skip nodes we are not interested in + if param.node().type().name() not in cls.node_types: + continue + + if any( + v for v in cls.prohibited_vars + if v in param.unexpandedString()): + invalid.append(param) + + return invalid + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid() + for param in invalid: + cls.log.info("processing: {}".format(param.path())) + cls.log.info("Replacing {} for {}".format( + param.unexpandedString(), + hou.text.expandString(param.unexpandedString()))) + param.set(hou.text.expandString(param.unexpandedString())) diff --git a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py new file mode 100644 index 0000000000..afadbffd3e --- /dev/null +++ b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py @@ -0,0 +1,10 @@ +from openpype.pipeline import install_host +from openpype.hosts.houdini import api + + +def main(): + print("Installing OpenPype ...") + install_host(api) + + +main() diff --git a/openpype/hosts/maya/__init__.py b/openpype/hosts/maya/__init__.py index 72b4d5853c..bb940a881b 100644 --- a/openpype/hosts/maya/__init__.py +++ b/openpype/hosts/maya/__init__.py @@ -1,6 +1,10 @@ -from .module import OpenPypeMaya +from .addon import ( + MayaAddon, + MAYA_ROOT_DIR, +) __all__ = ( - "OpenPypeMaya", + "MayaAddon", + "MAYA_ROOT_DIR", ) diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/addon.py similarity index 70% rename from openpype/hosts/maya/module.py rename to openpype/hosts/maya/addon.py index 5a215be8d2..cdd2bc1667 100644 --- a/openpype/hosts/maya/module.py +++ b/openpype/hosts/maya/addon.py @@ -1,12 +1,12 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class OpenPypeMaya(OpenPypeModule, IHostModule): - name = "openpype_maya" +class MayaAddon(OpenPypeModule, IHostAddon): + name = "maya" host_name = "maya" def initialize(self, module_settings): @@ -28,13 +28,16 @@ class OpenPypeMaya(OpenPypeModule, IHostModule): env["PYTHONPATH"] = os.pathsep.join(new_python_paths) - # Set default values if are not already set via settings - defaults = { - "OPENPYPE_LOG_NO_COLORS": "Yes" + # Set default environments + envs = { + "OPENPYPE_LOG_NO_COLORS": "Yes", + # For python module 'qtpy' + "QT_API": "PySide2", + # For python module 'Qt' + "QT_PREFERRED_BINDING": "PySide2" } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value + for key, value in envs.items(): + env[key] = value def get_launch_hook_paths(self, app): if app.host_name != self.host_name: diff --git 
a/openpype/hosts/maya/api/action.py b/openpype/hosts/maya/api/action.py index 90605734e7..065fdf3691 100644 --- a/openpype/hosts/maya/api/action.py +++ b/openpype/hosts/maya/api/action.py @@ -5,7 +5,7 @@ import pyblish.api from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context class GenerateUUIDsOnInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/maya/api/customize.py b/openpype/hosts/maya/api/customize.py index 683e6b24b0..f66858dfb6 100644 --- a/openpype/hosts/maya/api/customize.py +++ b/openpype/hosts/maya/api/customize.py @@ -8,7 +8,7 @@ from functools import partial import maya.cmds as cmds import maya.mel as mel -from openpype.api import resources +from openpype import resources from openpype.tools.utils import host_tools from .lib import get_main_window diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 58e160cb2f..7e15a91eca 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -23,7 +23,7 @@ from openpype.client import ( get_last_versions, get_representation_by_name ) -from openpype.api import get_anatomy_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( legacy_io, discover_loader_plugins, @@ -2459,182 +2459,120 @@ def bake_to_world_space(nodes, def load_capture_preset(data=None): + """Convert OpenPype Extract Playblast settings to `capture` arguments + + Input data is the settings from: + `project_settings/maya/publish/ExtractPlayblast/capture_preset` + + Args: + data (dict): Capture preset settings from OpenPype settings + + Returns: + dict: `capture.capture` compatible keyword arguments + + """ + import capture - preset = data - options = dict() + viewport_options = dict() + viewport2_options = dict() + camera_options = dict() - # CODEC - id = 'Codec' - for key in preset[id]: - options[str(key)] = preset[id][key] + # Straight key-value match from settings to capture arguments + options.update(data["Codec"]) + options.update(data["Generic"]) + options.update(data["Resolution"]) - # GENERIC - id = 'Generic' - for key in preset[id]: - options[str(key)] = preset[id][key] - - # RESOLUTION - id = 'Resolution' - options['height'] = preset[id]['height'] - options['width'] = preset[id]['width'] + camera_options.update(data['Camera Options']) + viewport_options.update(data["Renderer"]) # DISPLAY OPTIONS - id = 'Display Options' disp_options = {} - for key in preset['Display Options']: + for key, value in data['Display Options'].items(): if key.startswith('background'): - disp_options[key] = preset['Display Options'][key] - if len(disp_options[key]) == 4: - disp_options[key][0] = (float(disp_options[key][0])/255) - disp_options[key][1] = (float(disp_options[key][1])/255) - disp_options[key][2] = (float(disp_options[key][2])/255) - disp_options[key].pop() + # Convert background, backgroundTop, backgroundBottom colors + if len(value) == 4: + # Ignore alpha + convert RGB to float + value = [ + float(value[0]) / 255, + float(value[1]) / 255, + float(value[2]) / 255 + ] + disp_options[key] = value else: disp_options['displayGradient'] = True options['display_options'] = disp_options - # VIEWPORT OPTIONS - temp_options = {} - id = 'Renderer' - for key in preset[id]: - temp_options[str(key)] = preset[id][key] + # Viewport Options has a mixture of Viewport2 Options and Viewport Options + # to pass along to capture. 
So we'll need to differentiate between the two + VIEWPORT2_OPTIONS = { + "textureMaxResolution", + "renderDepthOfField", + "ssaoEnable", + "ssaoSamples", + "ssaoAmount", + "ssaoRadius", + "ssaoFilterRadius", + "hwFogStart", + "hwFogEnd", + "hwFogAlpha", + "hwFogFalloff", + "hwFogColorR", + "hwFogColorG", + "hwFogColorB", + "hwFogDensity", + "motionBlurEnable", + "motionBlurSampleCount", + "motionBlurShutterOpenFraction", + "lineAAEnable" + } + for key, value in data['Viewport Options'].items(): - temp_options2 = {} - id = 'Viewport Options' - for key in preset[id]: + # There are some keys we want to ignore + if key in {"override_viewport_options", "high_quality"}: + continue + + # First handle special cases where we do value conversion to + # separate option values if key == 'textureMaxResolution': - if preset[id][key] > 0: - temp_options2['textureMaxResolution'] = preset[id][key] - temp_options2['enableTextureMaxRes'] = True - temp_options2['textureMaxResMode'] = 1 + viewport2_options['textureMaxResolution'] = value + if value > 0: + viewport2_options['enableTextureMaxRes'] = True + viewport2_options['textureMaxResMode'] = 1 else: - temp_options2['textureMaxResolution'] = preset[id][key] - temp_options2['enableTextureMaxRes'] = False - temp_options2['textureMaxResMode'] = 0 + viewport2_options['enableTextureMaxRes'] = False + viewport2_options['textureMaxResMode'] = 0 - if key == 'multiSample': - if preset[id][key] > 0: - temp_options2['multiSampleEnable'] = True - temp_options2['multiSampleCount'] = preset[id][key] - else: - temp_options2['multiSampleEnable'] = False - temp_options2['multiSampleCount'] = preset[id][key] + elif key == 'multiSample': + viewport2_options['multiSampleEnable'] = value > 0 + viewport2_options['multiSampleCount'] = value - if key == 'renderDepthOfField': - temp_options2['renderDepthOfField'] = preset[id][key] + elif key == 'alphaCut': + viewport2_options['transparencyAlgorithm'] = 5 + viewport2_options['transparencyQuality'] = 1 - if key == 'ssaoEnable': - if preset[id][key] is True: - temp_options2['ssaoEnable'] = True - else: - temp_options2['ssaoEnable'] = False + elif key == 'hwFogFalloff': + # Settings enum value string to integer + viewport2_options['hwFogFalloff'] = int(value) - if key == 'ssaoSamples': - temp_options2['ssaoSamples'] = preset[id][key] - - if key == 'ssaoAmount': - temp_options2['ssaoAmount'] = preset[id][key] - - if key == 'ssaoRadius': - temp_options2['ssaoRadius'] = preset[id][key] - - if key == 'hwFogDensity': - temp_options2['hwFogDensity'] = preset[id][key] - - if key == 'ssaoFilterRadius': - temp_options2['ssaoFilterRadius'] = preset[id][key] - - if key == 'alphaCut': - temp_options2['transparencyAlgorithm'] = 5 - temp_options2['transparencyQuality'] = 1 - - if key == 'headsUpDisplay': - temp_options['headsUpDisplay'] = True - - if key == 'fogging': - temp_options['fogging'] = preset[id][key] or False - - if key == 'hwFogStart': - temp_options2['hwFogStart'] = preset[id][key] - - if key == 'hwFogEnd': - temp_options2['hwFogEnd'] = preset[id][key] - - if key == 'hwFogAlpha': - temp_options2['hwFogAlpha'] = preset[id][key] - - if key == 'hwFogFalloff': - temp_options2['hwFogFalloff'] = int(preset[id][key]) - - if key == 'hwFogColorR': - temp_options2['hwFogColorR'] = preset[id][key] - - if key == 'hwFogColorG': - temp_options2['hwFogColorG'] = preset[id][key] - - if key == 'hwFogColorB': - temp_options2['hwFogColorB'] = preset[id][key] - - if key == 'motionBlurEnable': - if preset[id][key] is True: - temp_options2['motionBlurEnable'] 
= True
-            else:
-                temp_options2['motionBlurEnable'] = False
-
-        if key == 'motionBlurSampleCount':
-            temp_options2['motionBlurSampleCount'] = preset[id][key]
-
-        if key == 'motionBlurShutterOpenFraction':
-            temp_options2['motionBlurShutterOpenFraction'] = preset[id][key]
-
-        if key == 'lineAAEnable':
-            if preset[id][key] is True:
-                temp_options2['lineAAEnable'] = True
-            else:
-                temp_options2['lineAAEnable'] = False
+        # Then handle Viewport 2.0 Options
+        elif key in VIEWPORT2_OPTIONS:
+            viewport2_options[key] = value
+        # Then assume remainder is Viewport Options
         else:
-            temp_options[str(key)] = preset[id][key]
+            viewport_options[key] = value

-    for key in ['override_viewport_options',
-                'high_quality',
-                'alphaCut',
-                'gpuCacheDisplayFilter',
-                'multiSample',
-                'ssaoEnable',
-                'ssaoSamples',
-                'ssaoAmount',
-                'ssaoFilterRadius',
-                'ssaoRadius',
-                'hwFogStart',
-                'hwFogEnd',
-                'hwFogAlpha',
-                'hwFogFalloff',
-                'hwFogColorR',
-                'hwFogColorG',
-                'hwFogColorB',
-                'hwFogDensity',
-                'textureMaxResolution',
-                'motionBlurEnable',
-                'motionBlurSampleCount',
-                'motionBlurShutterOpenFraction',
-                'lineAAEnable',
-                'renderDepthOfField'
-                ]:
-        temp_options.pop(key, None)
-
-    options['viewport_options'] = temp_options
-    options['viewport2_options'] = temp_options2
+    options['viewport_options'] = viewport_options
+    options['viewport2_options'] = viewport2_options
+    options['camera_options'] = camera_options

     # use active sound track
     scene = capture.parse_active_scene()
     options['sound'] = scene['sound']

-    # options['display_options'] = temp_options
-
     return options

@@ -3159,7 +3097,7 @@ def set_colorspace():
     """Set Colorspace from project configuration
     """
     project_name = os.getenv("AVALON_PROJECT")
-    imageio = get_anatomy_settings(project_name)["imageio"]["maya"]
+    imageio = get_project_settings(project_name)["maya"]["imageio"]

     # Maya 2022+ introduces new OCIO v2 color management settings that
     # can override the old color managenement preferences. OpenPype has
diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py
index 1e883ea43f..cd204445b7 100644
--- a/openpype/hosts/maya/api/lib_renderproducts.py
+++ b/openpype/hosts/maya/api/lib_renderproducts.py
@@ -80,7 +80,7 @@ IMAGE_PREFIXES = {
     "mayahardware2": "defaultRenderGlobals.imageFilePrefix"
 }

-RENDERMAN_IMAGE_DIR = "maya/<scene>/<layer>"
+RENDERMAN_IMAGE_DIR = "<scene>/<layer>"


 def has_tokens(string, tokens):
@@ -260,20 +260,20 @@ class ARenderProducts:

         """
         try:
-            file_prefix_attr = IMAGE_PREFIXES[self.renderer]
+            prefix_attr = IMAGE_PREFIXES[self.renderer]
         except KeyError:
             raise UnsupportedRendererException(
                 "Unsupported renderer {}".format(self.renderer)
             )

-        file_prefix = self._get_attr(file_prefix_attr)
+        prefix = self._get_attr(prefix_attr)

-        if not file_prefix:
+        if not prefix:
             # Fall back to scene name by default
             log.debug("Image prefix not set, using <Scene>")
-            file_prefix = "<Scene>"
+            prefix = "<Scene>"

-        return file_prefix
+        return prefix

     def get_render_attribute(self, attribute):
         """Get attribute from render options.
@@ -730,13 +730,16 @@ class RenderProductsVray(ARenderProducts):
         """Get image prefix for V-Ray.

         This overrides :func:`ARenderProducts.get_renderer_prefix()` as
-        we must add `<aov>` token manually.
+        we must add `<aov>` token manually. This is done only for
+        non-multipart outputs, where `<aov>` token doesn't make sense.

         See also:
             :func:`ARenderProducts.get_renderer_prefix()`

         """
         prefix = super(RenderProductsVray, self).get_renderer_prefix()
+        if self.multipart:
+            return prefix
         aov_separator = self._get_aov_separator()
         prefix = "{}{}<aov>".format(prefix, aov_separator)
         return prefix
@@ -974,15 +977,18 @@ class RenderProductsRedshift(ARenderProducts):
         """Get image prefix for Redshift.

         This overrides :func:`ARenderProducts.get_renderer_prefix()` as
-        we must add `<aov>` token manually.
+        we must add `<aov>` token manually. This is done only for
+        non-multipart outputs, where `<aov>` token doesn't make sense.

         See also:
             :func:`ARenderProducts.get_renderer_prefix()`

         """
-        file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
-        separator = self.extract_separator(file_prefix)
-        prefix = "{}{}<aov>".format(file_prefix, separator or "_")
+        prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
+        if self.multipart:
+            return prefix
+        separator = self.extract_separator(prefix)
+        prefix = "{}{}<aov>".format(prefix, separator or "_")
         return prefix

     def get_render_products(self):
diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py
index 7cd2193086..2b996702c3 100644
--- a/openpype/hosts/maya/api/lib_rendersettings.py
+++ b/openpype/hosts/maya/api/lib_rendersettings.py
@@ -5,7 +5,8 @@ import maya.mel as mel
 import six
 import sys

-from openpype.api import (
+from openpype.lib import Logger
+from openpype.settings import (
     get_project_settings,
     get_current_project_settings
 )
@@ -28,7 +29,7 @@ class RenderSettings(object):
     _image_prefixes = {
         'vray': get_current_project_settings()["maya"]["RenderSettings"]["vray_renderer"]["image_prefix"], # noqa
         'arnold': get_current_project_settings()["maya"]["RenderSettings"]["arnold_renderer"]["image_prefix"], # noqa
-        'renderman': 'maya/<scene>/<layer>/<layer>{aov_separator}<aov>',
+        'renderman': '<scene>/<layer>/<layer>{aov_separator}<aov>',
         'redshift': get_current_project_settings()["maya"]["RenderSettings"]["redshift_renderer"]["image_prefix"] # noqa
     }

@@ -38,6 +39,8 @@
         "underscore": "_"
     }

+    log = Logger.get_logger("RenderSettings")
+
     @classmethod
     def get_image_prefix_attr(cls, renderer):
         return cls._image_prefix_nodes[renderer]
@@ -133,20 +136,7 @@
             cmds.setAttr(
                 "defaultArnoldDriver.mergeAOVs", multi_exr)

-        # Passes additional options in from the schema as a list
-        # but converts it to a dictionary because ftrack doesn't
-        # allow fullstops in custom attributes. Then checks for
-        # type of MtoA attribute passed to adjust the `setAttr`
-        # command accordingly.
self._additional_attribs_setter(additional_options) - for item in additional_options: - attribute, value = item - if (cmds.getAttr(str(attribute), type=True)) == "long": - cmds.setAttr(str(attribute), int(value)) - elif (cmds.getAttr(str(attribute), type=True)) == "bool": - cmds.setAttr(str(attribute), int(value), type = "Boolean") # noqa - elif (cmds.getAttr(str(attribute), type=True)) == "string": - cmds.setAttr(str(attribute), str(value), type = "string") # noqa reset_frame_range() def _set_redshift_settings(self, width, height): @@ -230,12 +220,20 @@ class RenderSettings(object): cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) def _additional_attribs_setter(self, additional_attribs): - print(additional_attribs) for item in additional_attribs: attribute, value = item - if (cmds.getAttr(str(attribute), type=True)) == "long": - cmds.setAttr(str(attribute), int(value)) - elif (cmds.getAttr(str(attribute), type=True)) == "bool": - cmds.setAttr(str(attribute), int(value)) # noqa - elif (cmds.getAttr(str(attribute), type=True)) == "string": - cmds.setAttr(str(attribute), str(value), type = "string") # noqa + attribute = str(attribute) # ensure str conversion from settings + attribute_type = cmds.getAttr(attribute, type=True) + if attribute_type in {"long", "bool"}: + cmds.setAttr(attribute, int(value)) + elif attribute_type == "string": + cmds.setAttr(attribute, str(value), type="string") + elif attribute_type in {"double", "doubleAngle", "doubleLinear"}: + cmds.setAttr(attribute, float(value)) + else: + self.log.error( + "Attribute {attribute} can not be set due to unsupported " + "type: {attribute_type}".format( + attribute=attribute, + attribute_type=attribute_type) + ) diff --git a/openpype/hosts/maya/api/lib_rendersetup.py b/openpype/hosts/maya/api/lib_rendersetup.py index 0fdc54a068..e616f26e1b 100644 --- a/openpype/hosts/maya/api/lib_rendersetup.py +++ b/openpype/hosts/maya/api/lib_rendersetup.py @@ -348,3 +348,71 @@ def get_attr_overrides(node_attr, layer, break return reversed(plug_overrides) + + +def get_shader_in_layer(node, layer): + """Return the assigned shader in a renderlayer without switching layers. + + This has been developed and tested for Legacy Renderlayers and *not* for + Render Setup. + + Note: This will also return the shader for any face assignments, however + it will *not* return the components they are assigned to. This could + be implemented, but since Maya's renderlayers are famous for breaking + with face assignments there has been no need for this function to + support that. + + Returns: + list: The list of assigned shaders in the given layer. + + """ + + def _get_connected_shader(plug): + """Return current shader""" + return cmds.listConnections(plug, + source=False, + destination=True, + plugs=False, + connections=False, + type="shadingEngine") or [] + + # We check the instObjGroups (shader connection) for layer overrides. 
+    plug = node + ".instObjGroups"
+
+    # Ignore complex query if we're in the layer anyway (optimization)
+    current_layer = cmds.editRenderLayerGlobals(query=True,
+                                                currentRenderLayer=True)
+    if layer == current_layer:
+        return _get_connected_shader(plug)
+
+    connections = cmds.listConnections(plug,
+                                       plugs=True,
+                                       source=False,
+                                       destination=True,
+                                       type="renderLayer") or []
+    # Keep a list (not a lazy `filter` object) so the emptiness check works
+    connections = [x for x in connections if x.endswith(".outPlug")]
+    if not connections:
+        # If no overrides anywhere on the shader, just get the current shader
+        return _get_connected_shader(plug)
+
+    def _get_override(connections, layer):
+        """Return the overridden connection for that layer in connections"""
+        # If there's an override on that layer, return that.
+        for connection in connections:
+            if (connection.startswith(layer + ".outAdjustments") and
+                    connection.endswith(".outPlug")):
+
+                # This is a shader override on that layer so get the shader
+                # connected to .outValue of the .outAdjustment[i]
+                out_adjustment = connection.rsplit(".", 1)[0]
+                connection_attr = out_adjustment + ".outValue"
+                override = cmds.listConnections(connection_attr) or []
+
+                return override
+
+    override_shader = _get_override(connections, layer)
+    if override_shader is not None:
+        return override_shader
+    else:
+        # Get the override for "defaultRenderLayer" (=masterLayer)
+        return _get_override(connections, layer="defaultRenderLayer")
diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py
deleted file mode 100644
index 34a8450a26..0000000000
--- a/openpype/hosts/maya/api/lib_template_builder.py
+++ /dev/null
@@ -1,253 +0,0 @@
-import json
-from collections import OrderedDict
-import maya.cmds as cmds
-
-import qargparse
-from openpype.tools.utils.widgets import OptionDialog
-from .lib import get_main_window, imprint
-
-# To change as enum
-build_types = ["context_asset", "linked_asset", "all_assets"]
-
-
-def get_placeholder_attributes(node):
-    return {
-        attr: cmds.getAttr("{}.{}".format(node, attr))
-        for attr in cmds.listAttr(node, userDefined=True)}
-
-
-def delete_placeholder_attributes(node):
-    '''
-    function to delete all extra placeholder attributes
-    '''
-    extra_attributes = get_placeholder_attributes(node)
-    for attribute in extra_attributes:
-        cmds.deleteAttr(node + '.'
+ attribute) - - -def create_placeholder(): - args = placeholder_window() - - if not args: - return # operation canceled, no locator created - - # custom arg parse to force empty data query - # and still imprint them on placeholder - # and getting items when arg is of type Enumerator - options = create_options(args) - - # create placeholder name dynamically from args and options - placeholder_name = create_placeholder_name(args, options) - - selection = cmds.ls(selection=True) - if not selection: - raise ValueError("Nothing is selected") - - placeholder = cmds.spaceLocator(name=placeholder_name)[0] - - # get the long name of the placeholder (with the groups) - placeholder_full_name = cmds.ls(selection[0], long=True)[ - 0] + '|' + placeholder.replace('|', '') - - if selection: - cmds.parent(placeholder, selection[0]) - - imprint(placeholder_full_name, options) - - # Some tweaks because imprint force enums to to default value so we get - # back arg read and force them to attributes - imprint_enum(placeholder_full_name, args) - - # Add helper attributes to keep placeholder info - cmds.addAttr( - placeholder_full_name, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder_full_name, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder_full_name + '.parent', "", type="string") - - -def create_placeholder_name(args, options): - placeholder_builder_type = [ - arg.read() for arg in args if 'builder_type' in str(arg) - ][0] - placeholder_family = options['family'] - placeholder_name = placeholder_builder_type.split('_') - - # add famlily in any - if placeholder_family: - placeholder_name.insert(1, placeholder_family) - - # add loader arguments if any - if options['loader_args']: - pos = 2 - loader_args = options['loader_args'].replace('\'', '\"') - loader_args = json.loads(loader_args) - values = [v for v in loader_args.values()] - for i in range(len(values)): - placeholder_name.insert(i + pos, values[i]) - - placeholder_name = '_'.join(placeholder_name) - - return placeholder_name.capitalize() - - -def update_placeholder(): - placeholder = cmds.ls(selection=True) - if len(placeholder) == 0: - raise ValueError("No node selected") - if len(placeholder) > 1: - raise ValueError("Too many selected nodes") - placeholder = placeholder[0] - - args = placeholder_window(get_placeholder_attributes(placeholder)) - - if not args: - return # operation canceled - - # delete placeholder attributes - delete_placeholder_attributes(placeholder) - - options = create_options(args) - - imprint(placeholder, options) - imprint_enum(placeholder, args) - - cmds.addAttr( - placeholder, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder + '.parent', '', type="string") - - -def create_options(args): - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = arg._data.get("items") or arg.read() - return options - - -def imprint_enum(placeholder, args): - """ - Imprint method doesn't act properly with enums. - Replacing the functionnality with this for now - """ - enum_values = {str(arg): arg.read() - for arg in args if arg._data.get("items")} - string_to_value_enum_table = { - build: i for i, build - in enumerate(build_types)} - for key, value in enum_values.items(): - cmds.setAttr( - placeholder + "." 
+ key, - string_to_value_enum_table[value]) - - -def placeholder_window(options=None): - options = options or dict() - dialog = OptionDialog(parent=get_main_window()) - dialog.setWindowTitle("Create Placeholder") - - args = [ - qargparse.Separator("Main attributes"), - qargparse.Enum( - "builder_type", - label="Asset Builder Type", - default=options.get("builder_type", 0), - items=build_types, - help="""Asset Builder Type -Builder type describe what template loader will look for. -context_asset : Template loader will look for subsets of -current context asset (Asset bob will find asset) -linked_asset : Template loader will look for assets linked -to current context asset. -Linked asset are looked in avalon database under field "inputLinks" -""" - ), - qargparse.String( - "family", - default=options.get("family", ""), - label="OpenPype Family", - placeholder="ex: model, look ..."), - qargparse.String( - "representation", - default=options.get("representation", ""), - label="OpenPype Representation", - placeholder="ex: ma, abc ..."), - qargparse.String( - "loader", - default=options.get("loader", ""), - label="Loader", - placeholder="ex: ReferenceLoader, LightLoader ...", - help="""Loader -Defines what openpype loader will be used to load assets. -Useable loader depends on current host's loader list. -Field is case sensitive. -"""), - qargparse.String( - "loader_args", - default=options.get("loader_args", ""), - label="Loader Arguments", - placeholder='ex: {"camera":"persp", "lights":True}', - help="""Loader -Defines a dictionnary of arguments used to load assets. -Useable arguments depend on current placeholder Loader. -Field should be a valid python dict. Anything else will be ignored. -"""), - qargparse.Integer( - "order", - default=options.get("order", 0), - min=0, - max=999, - label="Order", - placeholder="ex: 0, 100 ... 
(smallest order loaded first)", - help="""Order -Order defines asset loading priority (0 to 999) -Priority rule is : "lowest is first to load"."""), - qargparse.Separator( - "Optional attributes"), - qargparse.String( - "asset", - default=options.get("asset", ""), - label="Asset filter", - placeholder="regex filtering by asset name", - help="Filtering assets by matching field regex to asset's name"), - qargparse.String( - "subset", - default=options.get("subset", ""), - label="Subset filter", - placeholder="regex filtering by subset name", - help="Filtering assets by matching field regex to subset's name"), - qargparse.String( - "hierarchy", - default=options.get("hierarchy", ""), - label="Hierarchy filter", - placeholder="regex filtering by asset's hierarchy", - help="Filtering assets by matching field asset's hierarchy") - ] - dialog.create(args) - - if not dialog.exec_(): - return None - - return args diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index ebba706a6c..e20f29049b 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -9,16 +9,17 @@ import maya.cmds as cmds from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.pipeline.workfile import BuildWorkfile -from openpype.pipeline.workfile.build_template import ( - build_workfile_template, - update_workfile_template -) from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS from .commands import reset_frame_range -from .lib_template_builder import create_placeholder, update_placeholder +from .workfile_template_builder import ( + create_placeholder, + update_placeholder, + build_workfile_template, + update_workfile_template, +) log = logging.getLogger(__name__) @@ -104,13 +105,6 @@ def install(): cmds.menuItem(divider=True) - cmds.menuItem( - "Set Render Settings", - command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa - ) - - cmds.menuItem(divider=True) - cmds.menuItem( "Work Files...", command=lambda *args: host_tools.show_workfiles( @@ -132,6 +126,12 @@ def install(): "Set Colorspace", command=lambda *args: lib.set_colorspace(), ) + + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa + ) + cmds.menuItem(divider=True, parent=MENU_NAME) cmds.menuItem( "Build First Workfile", @@ -162,12 +162,12 @@ def install(): cmds.menuItem( "Create Placeholder", parent=builder_menu, - command=lambda *args: create_placeholder() + command=create_placeholder ) cmds.menuItem( "Update Placeholder", parent=builder_menu, - command=lambda *args: update_placeholder() + command=update_placeholder ) cmds.menuItem( "Build Workfile from template", diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f565f6a308..b3bf738a2b 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -9,14 +9,18 @@ import maya.api.OpenMaya as om import pyblish.api from openpype.settings import get_project_settings -from openpype.host import HostBase, IWorkfileHost, ILoadHost -import openpype.hosts.maya +from openpype.host import ( + HostBase, + IWorkfileHost, + ILoadHost, + HostDirmap, +) from openpype.tools.utils import host_tools +from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog from openpype.lib import ( register_event_callback, emit_event ) -from 
openpype.lib.path_tools import HostDirmap
 from openpype.pipeline import (
     legacy_io,
     register_loader_plugin_path,
@@ -28,8 +32,17 @@ from openpype.pipeline import (
     AVALON_CONTAINER_ID,
 )
 from openpype.pipeline.load import any_outdated_containers
-from openpype.hosts.maya.lib import copy_workspace_mel
+from openpype.pipeline.workfile.lock_workfile import (
+    create_workfile_lock,
+    remove_workfile_lock,
+    is_workfile_locked,
+    is_workfile_lock_enabled
+)
+from openpype.hosts.maya import MAYA_ROOT_DIR
+from openpype.hosts.maya.lib import create_workspace_mel
+
 from . import menu, lib
+from .workfile_template_builder import MayaPlaceholderLoadPlugin
 from .workio import (
     open_file,
     save_file,
@@ -41,8 +54,7 @@ from .workio import (

 log = logging.getLogger("openpype.hosts.maya")

-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.maya.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PLUGINS_DIR = os.path.join(MAYA_ROOT_DIR, "plugins")
 PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
@@ -59,9 +71,10 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost):
         self._op_events = {}

     def install(self):
-        project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
+        project_name = legacy_io.active_project()
+        project_settings = get_project_settings(project_name)
         # process path mapping
-        dirmap_processor = MayaDirmap("maya", project_settings)
+        dirmap_processor = MayaDirmap("maya", project_name, project_settings)
         dirmap_processor.process_dirmap()

         pyblish.api.register_plugin_path(PUBLISH_PATH)
@@ -94,8 +107,13 @@
         register_event_callback("open", on_open)
         register_event_callback("new", on_new)
         register_event_callback("before.save", on_before_save)
+        register_event_callback("after.save", on_after_save)
+        register_event_callback("before.close", on_before_close)
+        register_event_callback("before.file.open", before_file_open)
         register_event_callback("taskChanged", on_task_changed)
+        register_event_callback("workfile.open.before", before_workfile_open)
         register_event_callback("workfile.save.before", before_workfile_save)
+        register_event_callback("workfile.save.after", after_workfile_save)

     def open_workfile(self, filepath):
         return open_file(filepath)
@@ -118,6 +136,11 @@
     def get_containers(self):
         return ls()

+    def get_workfile_build_placeholder_plugins(self):
+        return [
+            MayaPlaceholderLoadPlugin
+        ]
+
     @contextlib.contextmanager
     def maintained_selection(self):
         with lib.maintained_selection():
@@ -138,6 +161,13 @@
                 OpenMaya.MSceneMessage.kBeforeSave, _on_scene_save
             )
+        self._op_events[_after_scene_save] = (
+            OpenMaya.MSceneMessage.addCallback(
+                OpenMaya.MSceneMessage.kAfterSave,
+                _after_scene_save
+            )
+        )
+
         self._op_events[_before_scene_save] = (
             OpenMaya.MSceneMessage.addCheckCallback(
                 OpenMaya.MSceneMessage.kBeforeSaveCheck,
@@ -156,15 +186,35 @@
             )
         )

-        self._op_events[_on_scene_open] = OpenMaya.MSceneMessage.addCallback(
-            OpenMaya.MSceneMessage.kAfterOpen, _on_scene_open
+        self._op_events[_on_scene_open] = (
+            OpenMaya.MSceneMessage.addCallback(
+                OpenMaya.MSceneMessage.kAfterOpen,
+                _on_scene_open
+            )
+        )
+
+        self._op_events[_before_scene_open] = (
+            OpenMaya.MSceneMessage.addCallback(
+                OpenMaya.MSceneMessage.kBeforeOpen,
+                _before_scene_open
+            )
+        )
+
+
self._op_events[_before_close_maya] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kMayaExiting, + _before_close_maya + ) ) self.log.info("Installed event handler _on_scene_save..") self.log.info("Installed event handler _before_scene_save..") + self.log.info("Installed event handler _on_after_save..") self.log.info("Installed event handler _on_scene_new..") self.log.info("Installed event handler _on_maya_initialized..") self.log.info("Installed event handler _on_scene_open..") + self.log.info("Installed event handler _check_lock_file..") + self.log.info("Installed event handler _before_close_maya..") def _set_project(): @@ -203,6 +253,10 @@ def _on_scene_new(*args): emit_event("new") +def _after_scene_save(*arg): + emit_event("after.save") + + def _on_scene_save(*args): emit_event("save") @@ -211,6 +265,14 @@ def _on_scene_open(*args): emit_event("open") +def _before_close_maya(*args): + emit_event("before.close") + + +def _before_scene_open(*args): + emit_event("before.file.open") + + def _before_scene_save(return_code, client_data): # Default to allowing the action. Registered @@ -224,6 +286,23 @@ def _before_scene_save(return_code, client_data): ) +def _remove_workfile_lock(): + """Remove workfile lock on current file""" + if not handle_workfile_locks(): + return + filepath = current_file() + log.info("Removing lock on current file {}...".format(filepath)) + if filepath: + remove_workfile_lock(filepath) + + +def handle_workfile_locks(): + if lib.IS_HEADLESS: + return False + project_name = legacy_io.active_project() + return is_workfile_lock_enabled(MayaHost.name, project_name) + + def uninstall(): pyblish.api.deregister_plugin_path(PUBLISH_PATH) pyblish.api.deregister_host("mayabatch") @@ -344,21 +423,13 @@ def containerise(name, ("id", AVALON_CONTAINER_ID), ("name", name), ("namespace", namespace), - ("loader", str(loader)), + ("loader", loader), ("representation", context["representation"]["_id"]), ] for key, value in data: - if not value: - continue - - if isinstance(value, (int, float)): - cmds.addAttr(container, longName=key, attributeType="short") - cmds.setAttr(container + "." + key, value) - - else: - cmds.addAttr(container, longName=key, dataType="string") - cmds.setAttr(container + "." + key, value, type="string") + cmds.addAttr(container, longName=key, dataType="string") + cmds.setAttr(container + "." 
+ key, str(value), type="string") main_container = cmds.ls(AVALON_CONTAINERS, type="objectSet") if not main_container: @@ -429,6 +500,46 @@ def on_before_save(): return lib.validate_fps() +def on_after_save(): + """Check if there is a lockfile after save""" + check_lock_on_current_file() + + +def check_lock_on_current_file(): + + """Check if there is a user opening the file""" + if not handle_workfile_locks(): + return + log.info("Running callback on checking the lock file...") + + # add the lock file when opening the file + filepath = current_file() + + if is_workfile_locked(filepath): + # add lockfile dialog + workfile_dialog = WorkfileLockDialog(filepath) + if not workfile_dialog.exec_(): + cmds.file(new=True) + return + + create_workfile_lock(filepath) + + +def on_before_close(): + """Delete the lock file after user quitting the Maya Scene""" + log.info("Closing Maya...") + # delete the lock file + filepath = current_file() + if handle_workfile_locks(): + remove_workfile_lock(filepath) + + +def before_file_open(): + """check lock file when the file changed""" + # delete the lock file + _remove_workfile_lock() + + def on_save(): """Automatically add IDs to new nodes @@ -437,6 +548,8 @@ def on_save(): """ log.info("Running callback on save..") + # remove lockfile if users jumps over from one scene to another + _remove_workfile_lock() # # Update current task for the current scene # update_task_from_path(cmds.file(query=True, sceneName=True)) @@ -494,6 +607,9 @@ def on_open(): dialog.on_clicked.connect(_on_show_inventory) dialog.show() + # create lock file for the maya scene + check_lock_on_current_file() + def on_new(): """Set project resolution and fps when create a new file""" @@ -509,6 +625,7 @@ def on_new(): "from openpype.hosts.maya.api import lib;" "lib.add_render_layer_change_observer()") lib.set_context_settings() + _remove_workfile_lock() def on_task_changed(): @@ -536,7 +653,7 @@ def on_task_changed(): lib.update_content_on_context_change() msg = " project: {}\n asset: {}\n task:{}".format( - legacy_io.Session["AVALON_PROJECT"], + legacy_io.active_project(), legacy_io.Session["AVALON_ASSET"], legacy_io.Session["AVALON_TASK"] ) @@ -547,10 +664,26 @@ def on_task_changed(): ) +def before_workfile_open(): + if handle_workfile_locks(): + _remove_workfile_lock() + + def before_workfile_save(event): + project_name = legacy_io.active_project() + if handle_workfile_locks(): + _remove_workfile_lock() workdir_path = event["workdir_path"] if workdir_path: - copy_workspace_mel(workdir_path) + create_workspace_mel(workdir_path, project_name) + + +def after_workfile_save(event): + workfile_name = event["filename"] + if handle_workfile_locks(): + if workfile_name: + if not is_workfile_locked(workfile_name): + create_workfile_lock(workfile_name) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index e50ebfccad..39d821f620 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -4,6 +4,7 @@ from maya import cmds import qargparse +from openpype.lib import Logger from openpype.pipeline import ( LegacyCreator, LoaderPlugin, @@ -50,9 +51,7 @@ def get_reference_node(members, log=None): # Warn the user when we're taking the highest reference node if len(references) > 1: if not log: - from openpype.lib import PypeLogger - - log = PypeLogger().get_logger(__name__) + log = Logger.get_logger(__name__) log.warning("More than one reference node found in " "container, using highest reference node: " diff --git 
a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py deleted file mode 100644 index ecffafc93d..0000000000 --- a/openpype/hosts/maya/api/template_loader.py +++ /dev/null @@ -1,252 +0,0 @@ -import re -from maya import cmds - -from openpype.client import get_representations -from openpype.pipeline import legacy_io -from openpype.pipeline.workfile.abstract_template_loader import ( - AbstractPlaceholder, - AbstractTemplateLoader -) -from openpype.pipeline.workfile.build_template_exceptions import ( - TemplateAlreadyImported -) - -PLACEHOLDER_SET = 'PLACEHOLDERS_SET' - - -class MayaTemplateLoader(AbstractTemplateLoader): - """Concrete implementation of AbstractTemplateLoader for maya - """ - - def import_template(self, path): - """Import template into current scene. - Block if a template is already loaded. - Args: - path (str): A path to current template (usually given by - get_template_path implementation) - Returns: - bool: Wether the template was succesfully imported or not - """ - if cmds.objExists(PLACEHOLDER_SET): - raise TemplateAlreadyImported( - "Build template already loaded\n" - "Clean scene if needed (File > New Scene)") - - cmds.sets(name=PLACEHOLDER_SET, empty=True) - self.new_nodes = cmds.file(path, i=True, returnNewNodes=True) - cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True) - - for set in cmds.listSets(allSets=True): - if (cmds.objExists(set) and - cmds.attributeQuery('id', node=set, exists=True) and - cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'): - if cmds.attributeQuery('asset', node=set, exists=True): - cmds.setAttr( - set + '.asset', - legacy_io.Session['AVALON_ASSET'], type='string' - ) - - return True - - def template_already_imported(self, err_msg): - clearButton = "Clear scene and build" - updateButton = "Update template" - abortButton = "Abort" - - title = "Scene already builded" - message = ( - "It's seems a template was already build for this scene.\n" - "Error message reveived :\n\n\"{}\"".format(err_msg)) - buttons = [clearButton, updateButton, abortButton] - defaultButton = clearButton - cancelButton = abortButton - dismissString = abortButton - answer = cmds.confirmDialog( - t=title, - m=message, - b=buttons, - db=defaultButton, - cb=cancelButton, - ds=dismissString) - - if answer == clearButton: - cmds.file(newFile=True, force=True) - self.import_template(self.template_path) - self.populate_template() - elif answer == updateButton: - self.update_missing_containers() - elif answer == abortButton: - return - - @staticmethod - def get_template_nodes(): - attributes = cmds.ls('*.builder_type', long=True) - return [attribute.rpartition('.')[0] for attribute in attributes] - - def get_loaded_containers_by_id(self): - try: - containers = cmds.sets("AVALON_CONTAINERS", q=True) - except ValueError: - return None - - return [ - cmds.getAttr(container + '.representation') - for container in containers] - - -class MayaPlaceholder(AbstractPlaceholder): - """Concrete implementation of AbstractPlaceholder for maya - """ - - optional_keys = {'asset', 'subset', 'hierarchy'} - - def get_data(self, node): - user_data = dict() - for attr in self.required_keys.union(self.optional_keys): - attribute_name = '{}.{}'.format(node, attr) - if not cmds.attributeQuery(attr, node=node, exists=True): - print("{} not found".format(attribute_name)) - continue - user_data[attr] = cmds.getAttr( - attribute_name, - asString=True) - user_data['parent'] = ( - cmds.getAttr(node + '.parent', asString=True) - or node.rpartition('|')[0] - or "" 
- ) - user_data['node'] = node - if user_data['parent']: - siblings = cmds.listRelatives(user_data['parent'], children=True) - else: - siblings = cmds.ls(assemblies=True) - node_shortname = user_data['node'].rpartition('|')[2] - current_index = cmds.getAttr(node + '.index', asString=True) - user_data['index'] = ( - current_index if current_index >= 0 - else siblings.index(node_shortname)) - - self.data = user_data - - def parent_in_hierarchy(self, containers): - """Parent loaded container to placeholder's parent - ie : Set loaded content as placeholder's sibling - Args: - containers (String): Placeholder loaded containers - """ - if not containers: - return - - roots = cmds.sets(containers, q=True) - nodes_to_parent = [] - for root in roots: - if root.endswith("_RN"): - refRoot = cmds.referenceQuery(root, n=True)[0] - refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] - nodes_to_parent.extend(refRoot) - elif root in cmds.listSets(allSets=True): - if not cmds.sets(root, q=True): - return - else: - continue - else: - nodes_to_parent.append(root) - - if self.data['parent']: - cmds.parent(nodes_to_parent, self.data['parent']) - # Move loaded nodes to correct index in outliner hierarchy - placeholder_node = self.data['node'] - placeholder_form = cmds.xform( - placeholder_node, - q=True, - matrix=True, - worldSpace=True - ) - for node in set(nodes_to_parent): - cmds.reorder(node, front=True) - cmds.reorder(node, relative=self.data['index']) - cmds.xform(node, matrix=placeholder_form, ws=True) - - holding_sets = cmds.listSets(object=placeholder_node) - if not holding_sets: - return - for holding_set in holding_sets: - cmds.sets(roots, forceElement=holding_set) - - def clean(self): - """Hide placeholder, parent them to root - add them to placeholder set and register placeholder's parent - to keep placeholder info available for future use - """ - node = self.data['node'] - if self.data['parent']: - cmds.setAttr(node + '.parent', self.data['parent'], type='string') - if cmds.getAttr(node + '.index') < 0: - cmds.setAttr(node + '.index', self.data['index']) - - holding_sets = cmds.listSets(object=node) - if holding_sets: - for set in holding_sets: - cmds.sets(node, remove=set) - - if cmds.listRelatives(node, p=True): - node = cmds.parent(node, world=True)[0] - cmds.sets(node, addElement=PLACEHOLDER_SET) - cmds.hide(node) - cmds.setAttr(node + '.hiddenInOutliner', True) - - def get_representations(self, current_asset_doc, linked_asset_docs): - project_name = legacy_io.active_project() - - builder_type = self.data["builder_type"] - if builder_type == "context_asset": - context_filters = { - "asset": [current_asset_doc["name"]], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representations": [self.data["representation"]], - "family": [self.data["family"]] - } - - elif builder_type != "linked_asset": - context_filters = { - "asset": [re.compile(self.data["asset"])], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]] - } - - else: - asset_regex = re.compile(self.data["asset"]) - linked_asset_names = [] - for asset_doc in linked_asset_docs: - asset_name = asset_doc["name"] - if asset_regex.match(asset_name): - linked_asset_names.append(asset_name) - - context_filters = { - "asset": linked_asset_names, - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - 
"representation": [self.data["representation"]], - "family": [self.data["family"]], - } - - return list(get_representations( - project_name, - context_filters=context_filters - )) - - def err_message(self): - return ( - "Error while trying to load a representation.\n" - "Either the subset wasn't published or the template is malformed." - "\n\n" - "Builder was looking for :\n{attributes}".format( - attributes="\n".join([ - "{}: {}".format(key.title(), value) - for key, value in self.data.items()] - ) - ) - ) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py new file mode 100644 index 0000000000..ef043ed0f4 --- /dev/null +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -0,0 +1,330 @@ +import json + +from maya import cmds + +from openpype.pipeline import registered_host +from openpype.pipeline.workfile.workfile_template_builder import ( + TemplateAlreadyImported, + AbstractTemplateBuilder, + PlaceholderPlugin, + LoadPlaceholderItem, + PlaceholderLoadMixin, +) +from openpype.tools.workfile_template_build import ( + WorkfileBuildPlaceholderDialog, +) + +from .lib import read, imprint + +PLACEHOLDER_SET = "PLACEHOLDERS_SET" + + +class MayaTemplateBuilder(AbstractTemplateBuilder): + """Concrete implementation of AbstractTemplateBuilder for maya""" + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. + + Args: + path (str): A path to current template (usually given by + get_template_path implementation) + + Returns: + bool: Wether the template was succesfully imported or not + """ + + if cmds.objExists(PLACEHOLDER_SET): + raise TemplateAlreadyImported(( + "Build template already loaded\n" + "Clean scene if needed (File > New Scene)" + )) + + cmds.sets(name=PLACEHOLDER_SET, empty=True) + cmds.file(path, i=True, returnNewNodes=True) + + cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True) + + return True + + +class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): + identifier = "maya.load" + label = "Maya load" + + def _collect_scene_placeholders(self): + # Cache placeholder data to shared data + placeholder_nodes = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if placeholder_nodes is None: + attributes = cmds.ls("*.plugin_identifier", long=True) + placeholder_nodes = {} + for attribute in attributes: + node_name = attribute.rpartition(".")[0] + placeholder_nodes[node_name] = ( + self._parse_placeholder_node_data(node_name) + ) + + self.builder.set_shared_populate_data( + "placeholder_nodes", placeholder_nodes + ) + return placeholder_nodes + + def _parse_placeholder_node_data(self, node_name): + placeholder_data = read(node_name) + parent_name = ( + cmds.getAttr(node_name + ".parent", asString=True) + or node_name.rpartition("|")[0] + or "" + ) + if parent_name: + siblings = cmds.listRelatives(parent_name, children=True) + else: + siblings = cmds.ls(assemblies=True) + node_shortname = node_name.rpartition("|")[2] + current_index = cmds.getAttr(node_name + ".index", asString=True) + if current_index < 0: + current_index = siblings.index(node_shortname) + + placeholder_data.update({ + "parent": parent_name, + "index": current_index + }) + return placeholder_data + + def _create_placeholder_name(self, placeholder_data): + placeholder_name_parts = placeholder_data["builder_type"].split("_") + + pos = 1 + # add famlily in any + placeholder_family = placeholder_data["family"] + if placeholder_family: + 
+    def _create_placeholder_name(self, placeholder_data):
+        placeholder_name_parts = placeholder_data["builder_type"].split("_")
+
+        pos = 1
+        # add family if any
+        placeholder_family = placeholder_data["family"]
+        if placeholder_family:
+            placeholder_name_parts.insert(pos, placeholder_family)
+            pos += 1
+
+        # add loader arguments if any
+        loader_args = placeholder_data["loader_args"]
+        if loader_args:
+            loader_args = json.loads(loader_args.replace('\'', '\"'))
+            values = [v for v in loader_args.values()]
+            for value in values:
+                placeholder_name_parts.insert(pos, value)
+                pos += 1
+
+        placeholder_name = "_".join(placeholder_name_parts)
+
+        return placeholder_name.capitalize()
+
+    def _get_loaded_repre_ids(self):
+        loaded_representation_ids = self.builder.get_shared_populate_data(
+            "loaded_representation_ids"
+        )
+        if loaded_representation_ids is None:
+            try:
+                containers = cmds.sets("AVALON_CONTAINERS", q=True)
+            except ValueError:
+                containers = []
+
+            loaded_representation_ids = {
+                cmds.getAttr(container + ".representation")
+                for container in containers
+            }
+            self.builder.set_shared_populate_data(
+                "loaded_representation_ids", loaded_representation_ids
+            )
+        return loaded_representation_ids
+
+    def create_placeholder(self, placeholder_data):
+        selection = cmds.ls(selection=True)
+        if not selection:
+            raise ValueError("Nothing is selected")
+        if len(selection) > 1:
+            raise ValueError("More than one item is selected")
+
+        placeholder_data["plugin_identifier"] = self.identifier
+
+        placeholder_name = self._create_placeholder_name(placeholder_data)
+
+        placeholder = cmds.spaceLocator(name=placeholder_name)[0]
+        # TODO: this can crash if selection can't be used
+        cmds.parent(placeholder, selection[0])
+
+        # get the long name of the placeholder (with the groups)
+        placeholder_full_name = (
+            cmds.ls(selection[0], long=True)[0]
+            + "|"
+            + placeholder.replace("|", "")
+        )
+
+        imprint(placeholder_full_name, placeholder_data)
+
+        # Add helper attributes to keep placeholder info
+        cmds.addAttr(
+            placeholder_full_name,
+            longName="parent",
+            hidden=True,
+            dataType="string"
+        )
+        cmds.addAttr(
+            placeholder_full_name,
+            longName="index",
+            hidden=True,
+            attributeType="short",
+            defaultValue=-1
+        )
+
+        cmds.setAttr(placeholder_full_name + ".parent", "", type="string")
+
+    def update_placeholder(self, placeholder_item, placeholder_data):
+        node_name = placeholder_item.scene_identifier
+        new_values = {}
+        for key, value in placeholder_data.items():
+            placeholder_value = placeholder_item.data.get(key)
+            if value != placeholder_value:
+                new_values[key] = value
+                placeholder_item.data[key] = value
+
+        for key in new_values.keys():
+            cmds.deleteAttr(node_name + "." + key)
+
+        imprint(node_name, new_values)
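[Note: a short usage sketch of the update idiom above, not part of the patch;
the node name is hypothetical. Already-imprinted attributes are deleted
before re-imprinting, plausibly so a changed value, or even a changed
attribute type, can be written cleanly:]

    from maya import cmds
    from openpype.hosts.maya.api.lib import imprint

    node = "Context_placeholder_LOC"  # hypothetical placeholder node
    changed = {"representation": "abc"}

    # Mirrors update_placeholder(): drop the stale attribute first,
    # then imprint the new value.
    for key in changed:
        cmds.deleteAttr(node + "." + key)
    imprint(node, changed)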
+    def collect_placeholders(self):
+        output = []
+        scene_placeholders = self._collect_scene_placeholders()
+        for node_name, placeholder_data in scene_placeholders.items():
+            if placeholder_data.get("plugin_identifier") != self.identifier:
+                continue
+
+            # TODO do data validations and maybe upgrades if data are invalid
+            output.append(
+                LoadPlaceholderItem(node_name, placeholder_data, self)
+            )
+
+        return output
+
+    def populate_placeholder(self, placeholder):
+        self.populate_load_placeholder(placeholder)
+
+    def repopulate_placeholder(self, placeholder):
+        repre_ids = self._get_loaded_repre_ids()
+        self.populate_load_placeholder(placeholder, repre_ids)
+
+    def get_placeholder_options(self, options=None):
+        return self.get_load_plugin_options(options)
+
+    def cleanup_placeholder(self, placeholder, failed):
+        """Hide the placeholder, parent it to root,
+        add it to the placeholder set and store the placeholder's parent
+        to keep placeholder info available for future use.
+        """
+
+        node = placeholder.scene_identifier
+        node_parent = placeholder.data["parent"]
+        if node_parent:
+            cmds.setAttr(node + ".parent", node_parent, type="string")
+
+        if cmds.getAttr(node + ".index") < 0:
+            cmds.setAttr(node + ".index", placeholder.data["index"])
+
+        holding_sets = cmds.listSets(object=node)
+        if holding_sets:
+            for holding_set in holding_sets:
+                cmds.sets(node, remove=holding_set)
+
+        if cmds.listRelatives(node, p=True):
+            node = cmds.parent(node, world=True)[0]
+        cmds.sets(node, addElement=PLACEHOLDER_SET)
+        cmds.hide(node)
+        cmds.setAttr(node + ".hiddenInOutliner", True)
+
+    def load_succeed(self, placeholder, container):
+        self._parent_in_hierarchy(placeholder, container)
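[Note: a sketch, with an assumed node name, of how the "parent" and "index"
attributes written by cleanup_placeholder() are read back on a later update
or rebuild, mirroring _parse_placeholder_node_data():]

    from maya import cmds

    node = "Context_placeholder_LOC"  # hypothetical placeholder node
    stored_parent = cmds.getAttr(node + ".parent")  # e.g. "|assets_GRP"
    stored_index = cmds.getAttr(node + ".index")    # -1 means not stored yet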
+    def _parent_in_hierarchy(self, placeholder, container):
+        """Parent loaded container to placeholder's parent.
+
+        i.e. set loaded content as the placeholder's sibling.
+
+        Args:
+            container (str): Placeholder's loaded container
+        """
+
+        if not container:
+            return
+
+        roots = cmds.sets(container, q=True)
+        nodes_to_parent = []
+        for root in roots:
+            if root.endswith("_RN"):
+                refRoot = cmds.referenceQuery(root, n=True)[0]
+                refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
+                nodes_to_parent.extend(refRoot)
+            elif root not in cmds.listSets(allSets=True):
+                nodes_to_parent.append(root)
+
+            elif not cmds.sets(root, q=True):
+                return
+
+        if placeholder.data["parent"]:
+            cmds.parent(nodes_to_parent, placeholder.data["parent"])
+        # Move loaded nodes to correct index in outliner hierarchy
+        placeholder_form = cmds.xform(
+            placeholder.scene_identifier,
+            q=True,
+            matrix=True,
+            worldSpace=True
+        )
+        for node in set(nodes_to_parent):
+            cmds.reorder(node, front=True)
+            cmds.reorder(node, relative=placeholder.data["index"])
+            cmds.xform(node, matrix=placeholder_form, ws=True)
+
+        holding_sets = cmds.listSets(object=placeholder.scene_identifier)
+        if not holding_sets:
+            return
+        for holding_set in holding_sets:
+            cmds.sets(roots, forceElement=holding_set)
+
+
+def build_workfile_template(*args):
+    builder = MayaTemplateBuilder(registered_host())
+    builder.build_template()
+
+
+def update_workfile_template(*args):
+    builder = MayaTemplateBuilder(registered_host())
+    builder.rebuild_template()
+
+
+def create_placeholder(*args):
+    host = registered_host()
+    builder = MayaTemplateBuilder(host)
+    window = WorkfileBuildPlaceholderDialog(host, builder)
+    window.exec_()
+
+
+def update_placeholder(*args):
+    host = registered_host()
+    builder = MayaTemplateBuilder(host)
+    placeholder_items_by_id = {
+        placeholder_item.scene_identifier: placeholder_item
+        for placeholder_item in builder.get_placeholders()
+    }
+    placeholder_items = []
+    for node_name in cmds.ls(selection=True, long=True):
+        if node_name in placeholder_items_by_id:
+            placeholder_items.append(placeholder_items_by_id[node_name])
+
+    # TODO show UI at least
+    if len(placeholder_items) == 0:
+        raise ValueError("No node selected")
+
+    if len(placeholder_items) > 1:
+        raise ValueError("Too many selected nodes")
+
+    placeholder_item = placeholder_items[0]
+    window = WorkfileBuildPlaceholderDialog(host, builder)
+    window.set_update_mode(placeholder_item)
+    window.exec_()
diff --git a/openpype/hosts/maya/hooks/pre_copy_mel.py b/openpype/hosts/maya/hooks/pre_copy_mel.py
index b11e18241e..6f90af4b7c 100644
--- a/openpype/hosts/maya/hooks/pre_copy_mel.py
+++ b/openpype/hosts/maya/hooks/pre_copy_mel.py
@@ -1,5 +1,5 @@
 from openpype.lib import PreLaunchHook
-from openpype.hosts.maya.lib import copy_workspace_mel
+from openpype.hosts.maya.lib import create_workspace_mel


 class PreCopyMel(PreLaunchHook):
@@ -10,9 +10,10 @@ class PreCopyMel(PreLaunchHook):
     app_groups = ["maya"]

     def execute(self):
+        project_name = self.launch_context.env.get("AVALON_PROJECT")
         workdir = self.launch_context.env.get("AVALON_WORKDIR")
         if not workdir:
             self.log.warning("BUG: Workdir is not filled.")
             return

-        copy_workspace_mel(workdir)
+        create_workspace_mel(workdir, project_name)
diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py
index 6c142053e6..ffb2f0b27c 100644
--- a/openpype/hosts/maya/lib.py
+++ b/openpype/hosts/maya/lib.py
@@ -1,26 +1,24 @@
 import os
-import shutil

+from openpype.settings import get_project_settings
+from openpype.lib import Logger

-def copy_workspace_mel(workdir):
-    # Check that source mel exists
-    current_dir =
os.path.dirname(os.path.abspath(__file__)) - src_filepath = os.path.join(current_dir, "resources", "workspace.mel") - if not os.path.exists(src_filepath): - print("Source mel file does not exist. {}".format(src_filepath)) - return - - # Skip if workspace.mel already exists +def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): return - # Create workdir if does not exists yet if not os.path.exists(workdir): os.makedirs(workdir) - # Copy file - print("Copying workspace mel \"{}\" -> \"{}\"".format( - src_filepath, dst_filepath - )) - shutil.copy(src_filepath, dst_filepath) + project_setting = get_project_settings(project_name) + mel_script = project_setting["maya"].get("mel_workspace") + + # Skip if mel script in settings is empty + if not mel_script: + log = Logger.get_logger("create_workspace_mel") + log.debug("File 'workspace.mel' not created. Settings value is empty.") + return + + with open(dst_filepath, "w") as mel_file: + mel_file.write(mel_script) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index e47d4e5b5a..5ef5f61ab1 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -12,6 +12,7 @@ class CreateAnimation(plugin.Creator): family = "animation" icon = "male" write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) @@ -24,7 +25,7 @@ class CreateAnimation(plugin.Creator): # Write vertex colors with the geometry. self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = False + self.data["writeFaceSets"] = self.write_face_sets # Include only renderable visible shapes. # Skips locators and empty transforms diff --git a/openpype/hosts/maya/plugins/create/create_model.py b/openpype/hosts/maya/plugins/create/create_model.py index 37faad23a0..520e962f74 100644 --- a/openpype/hosts/maya/plugins/create/create_model.py +++ b/openpype/hosts/maya/plugins/create/create_model.py @@ -9,13 +9,14 @@ class CreateModel(plugin.Creator): family = "model" icon = "cube" defaults = ["Main", "Proxy", "_MD", "_HD", "_LD"] - + write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreateModel, self).__init__(*args, **kwargs) # Vertex colors with the geometry - self.data["writeColorSets"] = False - self.data["writeFaceSets"] = False + self.data["writeColorSets"] = self.write_color_sets + self.data["writeFaceSets"] = self.write_face_sets # Include attributes by attribute name or prefix self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 5516445de8..ab8fe12079 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -12,6 +12,7 @@ class CreatePointCache(plugin.Creator): family = "pointcache" icon = "gears" write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) @@ -21,7 +22,8 @@ class CreatePointCache(plugin.Creator): # Vertex colors with the geometry. self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = False # Vertex colors with the geometry. + # Vertex colors with the geometry. 
+ self.data["writeFaceSets"] = self.write_face_sets self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 5418ec1f2f..a3e1272652 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -9,26 +9,18 @@ import requests from maya import cmds from maya.app.renderSetup.model import renderSetup -from openpype.api import ( +from openpype.settings import ( get_system_settings, get_project_settings, ) +from openpype.lib import requests_get +from openpype.modules import ModulesManager +from openpype.pipeline import legacy_io from openpype.hosts.maya.api import ( lib, lib_rendersettings, plugin ) -from openpype.lib import requests_get -from openpype.api import ( - get_system_settings, - get_project_settings) -from openpype.modules import ModulesManager -from openpype.pipeline import legacy_io -from openpype.pipeline import ( - CreatorError, - legacy_io, -) -from openpype.pipeline.context_tools import get_current_project_asset class CreateRender(plugin.Creator): diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index 4e4417ff34..44cbee0502 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator for Unreal Static Meshes.""" from openpype.hosts.maya.api import plugin, lib -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from maya import cmds # noqa diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py index 45c4b7e443..59d80e6d5b 100644 --- a/openpype/hosts/maya/plugins/create/create_vrayscene.py +++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py @@ -12,7 +12,7 @@ from openpype.hosts.maya.api import ( lib, plugin ) -from openpype.api import ( +from openpype.settings import ( get_system_settings, get_project_settings ) diff --git a/openpype/hosts/maya/plugins/inventory/select_containers.py b/openpype/hosts/maya/plugins/inventory/select_containers.py new file mode 100644 index 0000000000..f85bf17ab0 --- /dev/null +++ b/openpype/hosts/maya/plugins/inventory/select_containers.py @@ -0,0 +1,46 @@ +from maya import cmds + +from openpype.pipeline import InventoryAction, registered_host +from openpype.hosts.maya.api.lib import get_container_members + + +class SelectInScene(InventoryAction): + """Select nodes in the scene from selected containers in scene inventory""" + + label = "Select in scene" + icon = "search" + color = "#888888" + order = 99 + + def process(self, containers): + + all_members = [] + for container in containers: + members = get_container_members(container) + all_members.extend(members) + cmds.select(all_members, replace=True, noExpand=True) + + +class HighlightBySceneSelection(InventoryAction): + """Select containers in scene inventory from the current scene selection""" + + label = "Highlight by scene selection" + icon = "search" + color = "#888888" + order = 100 + + def process(self, containers): + + selection = set(cmds.ls(selection=True, long=True, 
objectsOnly=True)) + host = registered_host() + + to_select = [] + for container in host.get_containers(): + members = get_container_members(container) + if any(member in selection for member in members): + to_select.append(container["objectName"]) + + return { + "objectNames": to_select, + "options": {"clear": True} + } diff --git a/openpype/hosts/maya/plugins/load/_load_animation.py b/openpype/hosts/maya/plugins/load/_load_animation.py index 0010efb829..b419a730b5 100644 --- a/openpype/hosts/maya/plugins/load/_load_animation.py +++ b/openpype/hosts/maya/plugins/load/_load_animation.py @@ -36,7 +36,7 @@ class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # hero_001 (abc) # asset_counter{optional} file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, sharedReferenceFile=False, diff --git a/openpype/hosts/maya/plugins/load/load_ass.py b/openpype/hosts/maya/plugins/load/load_ass.py index 1f0eb88995..5db6fc3dfa 100644 --- a/openpype/hosts/maya/plugins/load/load_ass.py +++ b/openpype/hosts/maya/plugins/load/load_ass.py @@ -1,7 +1,7 @@ import os import clique -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path @@ -65,8 +65,9 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): proxyPath = proxyPath_base + ".ma" + project_name = context["project"]["name"] file_url = self.prepare_root_value(proxyPath, - context["project"]["code"]) + project_name) nodes = cmds.file(file_url, namespace=namespace, @@ -85,7 +86,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): proxyShape.dso.set(path) proxyShape.aiOverrideShaders.set(0) - settings = get_project_settings(os.environ['AVALON_PROJECT']) + settings = get_project_settings(project_name) colors = settings['maya']['load']['colors'] c = colors.get(family) @@ -128,7 +129,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): file_url = self.prepare_root_value(proxyPath, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(file_url, loadReference=reference_node, type="mayaAscii", diff --git a/openpype/hosts/maya/plugins/load/load_gpucache.py b/openpype/hosts/maya/plugins/load/load_gpucache.py index 179819f904..a09f924c7b 100644 --- a/openpype/hosts/maya/plugins/load/load_gpucache.py +++ b/openpype/hosts/maya/plugins/load/load_gpucache.py @@ -4,7 +4,7 @@ from openpype.pipeline import ( load, get_representation_path ) -from openpype.api import get_project_settings +from openpype.settings import get_project_settings class GpuCacheLoader(load.LoaderPlugin): diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 7392adc4dd..3ef19ad96f 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -33,7 +33,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with lib.maintained_selection(): file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, reference=True, diff --git a/openpype/hosts/maya/plugins/load/load_redshift_proxy.py b/openpype/hosts/maya/plugins/load/load_redshift_proxy.py index d93a9f02a2..c288e23ded 100644 --- a/openpype/hosts/maya/plugins/load/load_redshift_proxy.py +++ 
b/openpype/hosts/maya/plugins/load/load_redshift_proxy.py @@ -5,7 +5,7 @@ import clique import maya.cmds as cmds -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index e4355ed3d4..c762a29326 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -1,11 +1,11 @@ import os from maya import cmds -from openpype.api import get_project_settings -from openpype.lib import get_creator_by_name -from openpype.pipeline import ( - legacy_io, +from openpype.settings import get_project_settings +from openpype.pipeline import legacy_io +from openpype.pipeline.create import ( legacy_create, + get_legacy_creator_by_name, ) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api.lib import maintained_selection @@ -52,7 +52,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with maintained_selection(): cmds.loadPlugin("AbcImport.mll", quiet=True) file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, sharedReferenceFile=False, @@ -153,7 +153,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): self.log.info("Creating subset: {}".format(namespace)) # Create the animation instance - creator_plugin = get_creator_by_name(self.animation_creator_name) + creator_plugin = get_legacy_creator_by_name( + self.animation_creator_name + ) with maintained_selection(): cmds.select([output, controls] + roots, noExpand=True) legacy_create( diff --git a/openpype/hosts/maya/plugins/load/load_vdb_to_arnold.py b/openpype/hosts/maya/plugins/load/load_vdb_to_arnold.py index d458c5abda..8a386cecfd 100644 --- a/openpype/hosts/maya/plugins/load/load_vdb_to_arnold.py +++ b/openpype/hosts/maya/plugins/load/load_vdb_to_arnold.py @@ -1,6 +1,6 @@ import os -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path diff --git a/openpype/hosts/maya/plugins/load/load_vdb_to_redshift.py b/openpype/hosts/maya/plugins/load/load_vdb_to_redshift.py index c6a69dfe35..1f02321dc8 100644 --- a/openpype/hosts/maya/plugins/load/load_vdb_to_redshift.py +++ b/openpype/hosts/maya/plugins/load/load_vdb_to_redshift.py @@ -1,6 +1,6 @@ import os -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path diff --git a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py index 3a16264ec0..9267c59c02 100644 --- a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -1,6 +1,6 @@ import os -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index e3d6166d3a..720a132aa7 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -10,7 +10,7 @@ import os import maya.cmds as cmds from openpype.client import 
get_representation_by_name -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( legacy_io, load, diff --git a/openpype/hosts/maya/plugins/load/load_vrayscene.py b/openpype/hosts/maya/plugins/load/load_vrayscene.py index 61132088cc..d87992f9a7 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayscene.py +++ b/openpype/hosts/maya/plugins/load/load_vrayscene.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import os import maya.cmds as cmds # noqa -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index 8435ba2493..090047e22d 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -6,7 +6,7 @@ from collections import defaultdict import clique from maya import cmds -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( load, get_representation_path @@ -250,7 +250,7 @@ class YetiCacheLoader(load.LoaderPlugin): """ name = node_name.replace(":", "_") - pattern = r"^({name})(\.[0-4]+)?(\.fur)$".format(name=re.escape(name)) + pattern = r"^({name})(\.[0-9]+)?(\.fur)$".format(name=re.escape(name)) files = [fname for fname in os.listdir(root) if re.match(pattern, fname)] diff --git a/openpype/hosts/maya/plugins/load/load_yeti_rig.py b/openpype/hosts/maya/plugins/load/load_yeti_rig.py index 241c28467a..651607de8a 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_rig.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_rig.py @@ -1,7 +1,7 @@ import os from collections import defaultdict -from openpype.api import get_project_settings +from openpype.settings import get_project_settings import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api import lib @@ -54,7 +54,7 @@ class YetiRigLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # load rig with lib.maintained_selection(): file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, reference=True, diff --git a/openpype/hosts/maya/plugins/publish/collect_assembly.py b/openpype/hosts/maya/plugins/publish/collect_assembly.py index 1a65bf1fde..2aef9ab908 100644 --- a/openpype/hosts/maya/plugins/publish/collect_assembly.py +++ b/openpype/hosts/maya/plugins/publish/collect_assembly.py @@ -70,7 +70,7 @@ class CollectAssembly(pyblish.api.InstancePlugin): data[representation_id].append(instance_data) instance.data["scenedata"] = dict(data) - instance.data["hierarchy"] = list(set(hierarchy_nodes)) + instance.data["nodesHierarchy"] = list(set(hierarchy_nodes)) def get_file_rule(self, rule): return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule)) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..470fceffc9 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -0,0 +1,215 @@ +import copy +from bson.objectid import ObjectId + +from maya import cmds +import maya.api.OpenMaya as om +import pyblish.api + +from openpype.pipeline import registered_host +from openpype.hosts.maya.api.lib import get_container_members +from openpype.hosts.maya.api.lib_rendersetup import 
get_shader_in_layer + + +def iter_history(nodes, + filter=om.MFn.kInvalid, + direction=om.MItDependencyGraph.kUpstream): + """Iterate unique upstream history for list of nodes. + + This acts as a replacement to maya.cmds.listHistory. + It's faster by about 2x-3x. It returns less than + maya.cmds.listHistory as it excludes the input nodes + from the output (unless an input node was history + for another input node). It also excludes duplicates. + + Args: + nodes (list): Maya node names to start search from. + filter (om.MFn.Type): Filter to only specific types. + e.g. to dag nodes using om.MFn.kDagNode + direction (om.MItDependencyGraph.Direction): Direction to traverse in. + Defaults to upstream. + + Yields: + str: Node names in upstream history. + + """ + if not nodes: + return + + sel = om.MSelectionList() + for node in nodes: + sel.add(node) + + it = om.MItDependencyGraph(sel.getDependNode(0)) # init iterator + handle = om.MObjectHandle + + traversed = set() + fn_dep = om.MFnDependencyNode() + fn_dag = om.MFnDagNode() + for i in range(sel.length()): + + start_node = sel.getDependNode(i) + start_node_hash = handle(start_node).hashCode() + if start_node_hash in traversed: + continue + + it.resetTo(start_node, + filter=filter, + direction=direction) + while not it.isDone(): + + node = it.currentNode() + node_hash = handle(node).hashCode() + + if node_hash in traversed: + it.prune() + it.next() # noqa: B305 + continue + + traversed.add(node_hash) + + if node.hasFn(om.MFn.kDagNode): + fn_dag.setObject(node) + yield fn_dag.fullPathName() + else: + fn_dep.setObject(node) + yield fn_dep.name() + + it.next() # noqa: B305 + + +def collect_input_containers(containers, nodes): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + # Assume the containers have collected their cached '_members' data + # in the collector. + return [container for container in containers + if any(node in container["_members"] for node in nodes)] + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect input source inputs for this publish. + + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.34 + hosts = ["maya"] + + def process(self, instance): + + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. 
+ # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data["__cache_containers"] = scene_containers + + # Collect the relevant input containers for this instance + if "renderlayer" in set(instance.data.get("families", [])): + # Special behavior for renderlayers + self.log.debug("Collecting renderlayer inputs....") + containers = self._collect_renderlayer_inputs(scene_containers, + instance) + + else: + # Basic behavior + nodes = instance[:] + + # Include any input connections of history with long names + # For optimization purposes only trace upstream from shape nodes + # looking for used dag nodes. This way having just a constraint + # on a transform is also ignored which tended to give irrelevant + # inputs for the majority of our use cases. We tend to care more + # about geometry inputs. + shapes = cmds.ls(nodes, + type=("mesh", "nurbsSurface", "nurbsCurve"), + noIntermediate=True) + if shapes: + history = list(iter_history(shapes, filter=om.MFn.kShape)) + history = cmds.ls(history, long=True) + + # Include the transforms in the collected history as shapes + # are excluded from containers + transforms = cmds.listRelatives(cmds.ls(history, shapes=True), + parent=True, + fullPath=True, + type="transform") + if transforms: + history.extend(transforms) + + if history: + nodes = list(set(nodes + history)) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, + nodes) + + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs + + self.log.info("Collected inputs: %s" % inputs) + + def _collect_renderlayer_inputs(self, scene_containers, instance): + """Collects inputs from nodes in renderlayer, incl. 
shaders + camera""" + + # Get the renderlayer + renderlayer = instance.data.get("setMembers") + + if renderlayer == "defaultRenderLayer": + # Assume all loaded containers in the scene are inputs + # for the masterlayer + return copy.deepcopy(scene_containers) + else: + # Get the members of the layer + members = cmds.editRenderLayerMembers(renderlayer, + query=True, + fullNames=True) or [] + + # In some cases invalid objects are returned from + # `editRenderLayerMembers` so we filter them out + members = cmds.ls(members, long=True) + + # Include all children + children = cmds.listRelatives(members, + allDescendents=True, + fullPath=True) or [] + members.extend(children) + + # Include assigned shaders in renderlayer + shapes = cmds.ls(members, shapes=True, long=True) + shaders = set() + for shape in shapes: + shape_shaders = get_shader_in_layer(shape, layer=renderlayer) + if not shape_shaders: + continue + shaders.update(shape_shaders) + members.extend(shaders) + + # Explicitly include the camera being rendered in renderlayer + cameras = instance.data.get("cameras") + members.extend(cameras) + + containers = collect_input_containers(scene_containers, members) + + return containers diff --git a/openpype/hosts/maya/plugins/publish/collect_maya_scene.py b/openpype/hosts/maya/plugins/publish/collect_maya_scene.py deleted file mode 100644 index eb21b17989..0000000000 --- a/openpype/hosts/maya/plugins/publish/collect_maya_scene.py +++ /dev/null @@ -1,25 +0,0 @@ -from maya import cmds - -import pyblish.api - - -class CollectMayaScene(pyblish.api.InstancePlugin): - """Collect Maya Scene Data - - """ - - order = pyblish.api.CollectorOrder + 0.2 - label = 'Collect Model Data' - families = ["mayaScene"] - - def process(self, instance): - # Extract only current frame (override) - frame = cmds.currentTime(query=True) - instance.data["frameStart"] = frame - instance.data["frameEnd"] = frame - - # make ftrack publishable - if instance.data.get('families'): - instance.data['families'].append('ftrack') - else: - instance.data['families'] = ['ftrack'] diff --git a/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py b/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py new file mode 100644 index 0000000000..7e198df14d --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py @@ -0,0 +1,26 @@ +from maya import cmds + +import pyblish.api + + +class CollectMayaSceneTime(pyblish.api.InstancePlugin): + """Collect Maya Scene playback range + + This allows to reproduce the playback range for the content to be loaded. + It does *not* limit the extracted data to only data inside that time range. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.2 + label = 'Collect Maya Scene Time' + families = ["mayaScene"] + + def process(self, instance): + instance.data.update({ + "frameStart": cmds.playbackOptions(query=True, minTime=True), + "frameEnd": cmds.playbackOptions(query=True, maxTime=True), + "frameStartHandle": cmds.playbackOptions(query=True, + animationStartTime=True), + "frameEndHandle": cmds.playbackOptions(query=True, + animationEndTime=True) + }) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index ebda5e190d..b1ad3ca58e 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -102,23 +102,26 @@ class CollectMayaRender(pyblish.api.ContextPlugin): } for layer in collected_render_layers: - try: - if layer.startswith("LAYER_"): - # this is support for legacy mode where render layers - # started with `LAYER_` prefix. - expected_layer_name = re.search( - r"^LAYER_(.*)", layer).group(1) - else: - # new way is to prefix render layer name with instance - # namespace. - expected_layer_name = re.search( - r"^.+:(.*)", layer).group(1) - except IndexError: + if layer.startswith("LAYER_"): + # this is support for legacy mode where render layers + # started with `LAYER_` prefix. + layer_name_pattern = r"^LAYER_(.*)" + else: + # new way is to prefix render layer name with instance + # namespace. + layer_name_pattern = r"^.+:(.*)" + + # todo: We should have a more explicit way to link the renderlayer + match = re.match(layer_name_pattern, layer) + if not match: msg = "Invalid layer name in set [ {} ]".format(layer) self.log.warning(msg) continue - self.log.info("processing %s" % layer) + expected_layer_name = match.group(1) + self.log.info("Processing '{}' as layer [ {} ]" + "".format(layer, expected_layer_name)) + # check if layer is part of renderSetup if expected_layer_name not in maya_render_layers: msg = "Render layer [ {} ] is not in " "Render Setup".format( @@ -293,6 +296,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "source": filepath, "expectedFiles": full_exp_files, "publishRenderMetadataFolder": common_publish_meta_path, + "renderProducts": layer_render_products, "resolutionWidth": lib.get_attr_in_layer( "defaultResolution.width", layer=layer_name ), @@ -359,7 +363,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance.data["label"] = label instance.data["farm"] = True instance.data.update(data) - self.log.debug("data: {}".format(json.dumps(data, indent=4))) def parse_options(self, render_globals): """Get all overrides with a value, skip those without. diff --git a/openpype/hosts/maya/plugins/publish/collect_rig.py b/openpype/hosts/maya/plugins/publish/collect_rig.py deleted file mode 100644 index 98ae1e8009..0000000000 --- a/openpype/hosts/maya/plugins/publish/collect_rig.py +++ /dev/null @@ -1,22 +0,0 @@ -from maya import cmds - -import pyblish.api - - -class CollectRigData(pyblish.api.InstancePlugin): - """Collect rig data - - Ensures rigs are published to Ftrack. 
- - """ - - order = pyblish.api.CollectorOrder + 0.2 - label = 'Collect Rig Data' - families = ["rig"] - - def process(self, instance): - # make ftrack publishable - if instance.data.get('families'): - instance.data['families'].append('ftrack') - else: - instance.data['families'] = ['ftrack'] diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 760f410f91..5c21a4ff08 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,12 +1,12 @@ import os -import openpype.api - from maya import cmds + +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractAssStandin(openpype.api.Extractor): +class ExtractAssStandin(publish.Extractor): """Extract the content of the instance to a ass file Things to pay attention to: diff --git a/openpype/hosts/maya/plugins/publish/extract_assembly.py b/openpype/hosts/maya/plugins/publish/extract_assembly.py index 482930b76e..35932003ee 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assembly.py +++ b/openpype/hosts/maya/plugins/publish/extract_assembly.py @@ -1,14 +1,13 @@ +import os import json -import os - -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import extract_alembic from maya import cmds -class ExtractAssembly(openpype.api.Extractor): +class ExtractAssembly(publish.Extractor): """Produce an alembic of just point positions and normals. Positions and normals are preserved, but nothing more, @@ -33,7 +32,7 @@ class ExtractAssembly(openpype.api.Extractor): json.dump(instance.data["scenedata"], filepath, ensure_ascii=False) self.log.info("Extracting point cache ..") - cmds.select(instance.data["hierarchy"]) + cmds.select(instance.data["nodesHierarchy"]) # Run basic alembic exporter extract_alembic(file=hierarchy_path, diff --git a/openpype/hosts/maya/plugins/publish/extract_assproxy.py b/openpype/hosts/maya/plugins/publish/extract_assproxy.py index 93720dbb82..4937a28a9e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assproxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_assproxy.py @@ -3,17 +3,17 @@ import contextlib from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractAssProxy(openpype.api.Extractor): +class ExtractAssProxy(publish.Extractor): """Extract proxy model as Maya Ascii to use as arnold standin """ - order = openpype.api.Extractor.order + 0.2 + order = publish.Extractor.order + 0.2 label = "Ass Proxy (Maya ASCII)" hosts = ["maya"] families = ["ass"] diff --git a/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py b/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py index b744bfd0fe..aa445a0387 100644 --- a/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py +++ b/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -class ExtractCameraAlembic(openpype.api.Extractor): +class ExtractCameraAlembic(publish.Extractor): """Extract a Camera as Alembic. The cameras gets baked to world space by default. 
Only when the instance's diff --git a/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py index 8d6c4b5f3c..7467fa027d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -5,7 +5,7 @@ import itertools from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib @@ -78,7 +78,7 @@ def unlock(plug): cmds.disconnectAttr(source, destination) -class ExtractCameraMayaScene(openpype.api.Extractor): +class ExtractCameraMayaScene(publish.Extractor): """Extract a Camera as Maya Scene. This will create a duplicate of the camera that will be baked *with* diff --git a/openpype/hosts/maya/plugins/publish/extract_fbx.py b/openpype/hosts/maya/plugins/publish/extract_fbx.py index fbbe8e06b0..9af3acef65 100644 --- a/openpype/hosts/maya/plugins/publish/extract_fbx.py +++ b/openpype/hosts/maya/plugins/publish/extract_fbx.py @@ -4,13 +4,13 @@ import os from maya import cmds # noqa import maya.mel as mel # noqa import pyblish.api -import openpype.api -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import maintained_selection from openpype.hosts.maya.api import fbx -class ExtractFBX(openpype.api.Extractor): +class ExtractFBX(publish.Extractor): """Extract FBX from Maya. This extracts reproducible FBX exports ignoring any of the diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 991217684a..a801d99f42 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -5,13 +5,11 @@ import json from maya import cmds from maya.api import OpenMaya as om -from bson.objectid import ObjectId - -from openpype.pipeline import legacy_io -import openpype.api +from openpype.client import get_representation_by_id +from openpype.pipeline import legacy_io, publish -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" @@ -30,24 +28,27 @@ class ExtractLayout(openpype.api.Extractor): instance.data["representations"] = [] json_data = [] + # TODO representation queries can be refactored to be faster + project_name = legacy_io.active_project() for asset in cmds.sets(str(instance), query=True): # Find the container grp_name = asset.split(':')[0] - containers = cmds.ls(f"{grp_name}*_CON") + containers = cmds.ls("{}*_CON".format(grp_name)) assert len(containers) == 1, \ - f"More than one container found for {asset}" + "More than one container found for {}".format(asset) container = containers[0] - representation_id = cmds.getAttr(f"{container}.representation") + representation_id = cmds.getAttr( + "{}.representation".format(container)) - representation = legacy_io.find_one( - { - "type": "representation", - "_id": ObjectId(representation_id) - }, projection={"parent": True, "context.family": True}) + representation = get_representation_by_id( + project_name, + representation_id, + fields=["parent", "context.family"] + ) self.log.info(representation) @@ -56,7 +57,8 @@ class ExtractLayout(openpype.api.Extractor): json_element = { "family": family, - "instance_name": cmds.getAttr(f"{container}.name"), + "instance_name": cmds.getAttr( + "{}.namespace".format(container)), "representation": 
str(representation_id), "version": str(version_id) } @@ -102,9 +104,10 @@ class ExtractLayout(openpype.api.Extractor): for i in range(0, len(t_matrix_list), row_length): t_matrix.append(t_matrix_list[i:i + row_length]) - json_element["transform_matrix"] = [] - for row in t_matrix: - json_element["transform_matrix"].append(list(row)) + json_element["transform_matrix"] = [ + list(row) + for row in t_matrix + ] basis_list = [ 1, 0, 0, 0, diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index ce3b265566..403b4ee6bc 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -13,8 +13,8 @@ from maya import cmds # noqa import pyblish.api -import openpype.api -from openpype.pipeline import legacy_io +from openpype.lib import source_hash, run_subprocess +from openpype.pipeline import legacy_io, publish from openpype.hosts.maya.api import lib # Modes for transfer @@ -68,7 +68,7 @@ def find_paths_by_hash(texture_hash): return legacy_io.distinct(key, {"type": "version"}) -def maketx(source, destination, *args): +def maketx(source, destination, args, logger): """Make `.tx` using `maketx` with some default settings. The settings are based on default as used in Arnold's @@ -79,7 +79,8 @@ def maketx(source, destination, *args): Args: source (str): Path to source file. destination (str): Writing destination path. - *args: Additional arguments for `maketx`. + args (list): Additional arguments for `maketx`. + logger (logging.Logger): Logger to log messages to. Returns: str: Output of `maketx` command. @@ -94,7 +95,7 @@ def maketx(source, destination, *args): "OIIO tool not found in {}".format(maketx_path)) raise AssertionError("OIIO tool not found") - cmd = [ + subprocess_args = [ maketx_path, "-v", # verbose "-u", # update mode @@ -103,27 +104,20 @@ def maketx(source, destination, *args): "--checknan", # use oiio-optimized settings for tile-size, planarconfig, metadata "--oiio", - "--filter lanczos3", - escape_space(source) + "--filter", "lanczos3", + source ] - cmd.extend(args) - cmd.extend(["-o", escape_space(destination)]) + subprocess_args.extend(args) + subprocess_args.extend(["-o", destination]) - cmd = " ".join(cmd) + cmd = " ".join(subprocess_args) + logger.debug(cmd) - CREATE_NO_WINDOW = 0x08000000 # noqa - kwargs = dict(args=cmd, stderr=subprocess.STDOUT) - - if sys.platform == "win32": - kwargs["creationflags"] = CREATE_NO_WINDOW try: - out = subprocess.check_output(**kwargs) - except subprocess.CalledProcessError as exc: - print(exc) - import traceback - - traceback.print_exc() + out = run_subprocess(subprocess_args) + except Exception: + logger.error("Maketx converion failed", exc_info=True) raise return out @@ -161,7 +155,7 @@ def no_workspace_dir(): os.rmdir(fake_workspace_dir) -class ExtractLook(openpype.api.Extractor): +class ExtractLook(publish.Extractor): """Extract Look (Maya Scene + JSON) Only extracts the sets (shadingEngines and alike) alongside a .json file @@ -505,7 +499,7 @@ class ExtractLook(openpype.api.Extractor): args = [] if do_maketx: args.append("maketx") - texture_hash = openpype.api.source_hash(filepath, *args) + texture_hash = source_hash(filepath, *args) # If source has been published before with the same settings, # then don't reprocess but hardlink from the original @@ -524,15 +518,17 @@ class ExtractLook(openpype.api.Extractor): if do_maketx and ext != ".tx": # Produce .tx file in staging if source file is not .tx converted = 
os.path.join(staging, "resources", fname + ".tx") - + additional_args = [ + "--sattrib", + "sourceHash", + texture_hash + ] if linearize: self.log.info("tx: converting sRGB -> linear") - colorconvert = "--colorconvert sRGB linear" - else: - colorconvert = "" + additional_args.extend(["--colorconvert", "sRGB", "linear"]) config_path = get_ocio_config_path("nuke-default") - color_config = "--colorconfig {0}".format(config_path) + additional_args.extend(["--colorconfig", config_path]) # Ensure folder exists if not os.path.exists(os.path.dirname(converted)): os.makedirs(os.path.dirname(converted)) @@ -541,12 +537,8 @@ class ExtractLook(openpype.api.Extractor): maketx( filepath, converted, - # Include `source-hash` as string metadata - "--sattrib", - "sourceHash", - escape_space(texture_hash), - colorconvert, - color_config + additional_args, + self.log ) return converted, COPY, texture_hash diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py index 3a47cdadb5..3769ec3605 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py @@ -4,12 +4,11 @@ import os from maya import cmds -import openpype.api from openpype.hosts.maya.api.lib import maintained_selection -from openpype.pipeline import AVALON_CONTAINER_ID +from openpype.pipeline import AVALON_CONTAINER_ID, publish -class ExtractMayaSceneRaw(openpype.api.Extractor): +class ExtractMayaSceneRaw(publish.Extractor): """Extract as Maya Scene (raw). This will preserve all references, construction history, etc. diff --git a/openpype/hosts/maya/plugins/publish/extract_model.py b/openpype/hosts/maya/plugins/publish/extract_model.py index 0282d1e9c8..7c8c3a2981 100644 --- a/openpype/hosts/maya/plugins/publish/extract_model.py +++ b/openpype/hosts/maya/plugins/publish/extract_model.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -class ExtractModel(openpype.api.Extractor): +class ExtractModel(publish.Extractor): """Extract as Model (Maya Scene). Only extracts contents based on the original "setMembers" data to ensure diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py index 82e2b41929..92137acb95 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseLook(openpype.api.Extractor): +class ExtractMultiverseLook(publish.Extractor): """Extractor for Multiverse USD look data. This will extract: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 3654be7b34..6c352bebe6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -3,11 +3,11 @@ import six from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseUsd(openpype.api.Extractor): +class ExtractMultiverseUsd(publish.Extractor): """Extractor for Multiverse USD Asset data. 
This will extract settings for a Multiverse Write Asset operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index ad9303657f..a62729c198 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseUsdComposition(openpype.api.Extractor): +class ExtractMultiverseUsdComposition(publish.Extractor): """Extractor of Multiverse USD Composition data. This will extract settings for a Multiverse Write Composition operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index d44e3878b8..0628623e88 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -1,12 +1,12 @@ import os -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds -class ExtractMultiverseUsdOverride(openpype.api.Extractor): +class ExtractMultiverseUsdOverride(publish.Extractor): """Extractor for Multiverse USD Override data. This will extract settings for a Multiverse Write Override operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 54ef09e060..1b5b8d34e4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -1,18 +1,16 @@ import os -import glob -import contextlib import clique import capture +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -import openpype.api from maya import cmds import pymel.core as pm -class ExtractPlayblast(openpype.api.Extractor): +class ExtractPlayblast(publish.Extractor): """Extract viewport playblast. 
Takes review camera and creates review Quicktime video based on viewport @@ -79,8 +77,10 @@ class ExtractPlayblast(openpype.api.Extractor): preset['height'] = asset_height preset['start_frame'] = start preset['end_frame'] = end - camera_option = preset.get("camera_option", {}) - camera_option["depthOfField"] = cmds.getAttr( + + # Enforce persisting camera depth of field + camera_options = preset.setdefault("camera_options", {}) + camera_options["depthOfField"] = cmds.getAttr( "{0}.depthOfField".format(camera)) stagingdir = self.staging_dir(instance) @@ -128,7 +128,7 @@ class ExtractPlayblast(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: - panel = cmds.getPanel(with_focus=True) + panel = cmds.getPanel(withFocus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) cmds.setFocus(panel) @@ -138,8 +138,10 @@ class ExtractPlayblast(openpype.api.Extractor): self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) + patterns = [clique.PATTERNS["frames"]] collections, remainder = clique.assemble(collected_files, - minimum_items=1) + minimum_items=1, + patterns=patterns) self.log.debug("filename {}".format(filename)) frame_collection = None diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index bf6feecef3..7c1c6d5c12 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -2,7 +2,7 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( extract_alembic, suspended_refresh, @@ -11,7 +11,7 @@ from openpype.hosts.maya.api.lib import ( ) -class ExtractAlembic(openpype.api.Extractor): +class ExtractAlembic(publish.Extractor): """Produce an alembic of just point positions and normals. 
Positions and normals, uvs, creases are preserved, but nothing more, diff --git a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py index 23cac9190d..4377275635 100644 --- a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractRedshiftProxy(openpype.api.Extractor): +class ExtractRedshiftProxy(publish.Extractor): """Extract the content of the instance to a redshift proxy file.""" label = "Redshift Proxy (.rs)" diff --git a/openpype/hosts/maya/plugins/publish/extract_rendersetup.py b/openpype/hosts/maya/plugins/publish/extract_rendersetup.py index 6bdd5f590e..5970c038a4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_rendersetup.py +++ b/openpype/hosts/maya/plugins/publish/extract_rendersetup.py @@ -1,10 +1,11 @@ -import json import os -import openpype.api +import json + import maya.app.renderSetup.model.renderSetup as renderSetup +from openpype.pipeline import publish -class ExtractRenderSetup(openpype.api.Extractor): +class ExtractRenderSetup(publish.Extractor): """ Produce renderSetup template file diff --git a/openpype/hosts/maya/plugins/publish/extract_rig.py b/openpype/hosts/maya/plugins/publish/extract_rig.py index 53c1eeb671..c71a2f710d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_rig.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractRig(openpype.api.Extractor): +class ExtractRig(publish.Extractor): """Extract rig as Maya Scene.""" label = "Extract Rig (Maya Scene)" diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 01980578cf..712159c2be 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -1,16 +1,17 @@ import os import glob +import tempfile import capture +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -import openpype.api from maya import cmds import pymel.core as pm -class ExtractThumbnail(openpype.api.Extractor): +class ExtractThumbnail(publish.Extractor): """Extract viewport thumbnail. Takes review camera and creates a thumbnail based on viewport @@ -81,9 +82,17 @@ class ExtractThumbnail(openpype.api.Extractor): elif asset_width and asset_height: preset['width'] = asset_width preset['height'] = asset_height - stagingDir = self.staging_dir(instance) + + # Create temp directory for thumbnail + # - this is to avoid "override" of source file + dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") + self.log.debug( + "Create temp directory {} for thumbnail".format(dst_staging) + ) + # Store new staging to cleanup paths + instance.context.data["cleanupFullPaths"].append(dst_staging) filename = "{0}".format(instance.name) - path = os.path.join(stagingDir, filename) + path = os.path.join(dst_staging, filename) self.log.info("Outputting images to %s" % path) @@ -100,9 +109,9 @@ class ExtractThumbnail(openpype.api.Extractor): # camera. 
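The thumbnail extractor above switches from the shared staging directory to a throwaway `tempfile.mkdtemp` directory, so rendering the thumbnail cannot overwrite files already staged by other extractors; registering the directory in `cleanupFullPaths` hands its removal to the cleanup step at the end of the publish. The pattern in isolation (the helper name is illustrative, the plugin inlines this in `process()`):

```python
# Sketch of the temp staging pattern used above (helper name made up).
import os
import tempfile


def thumbnail_staging_path(instance):
    # A private directory per publish avoids clobbering the source
    # staging files that other representations still point to.
    dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_")
    # cleanupFullPaths is consumed by OpenPype's cleanup plugin, which
    # deletes the listed paths once integration has finished.
    instance.context.data["cleanupFullPaths"].append(dst_staging)
    return os.path.join(dst_staging, instance.name)
```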
if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] - + # Show or Hide Image Plane - image_plane = instance.data.get("imagePlane", True) + image_plane = instance.data.get("imagePlane", True) if "viewport_options" in preset: preset["viewport_options"]["imagePlane"] = image_plane else: @@ -117,7 +126,7 @@ class ExtractThumbnail(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: - panel = cmds.getPanel(with_focus=True) + panel = cmds.getPanel(withFocus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) cmds.setFocus(panel) @@ -137,7 +146,7 @@ class ExtractThumbnail(openpype.api.Extractor): 'name': 'thumbnail', 'ext': 'jpg', 'files': thumbnail, - "stagingDir": stagingDir, + "stagingDir": dst_staging, "thumbnail": True } instance.data["representations"].append(representation) diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py index 7ef7f2f181..258120db2f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py @@ -6,7 +6,8 @@ from contextlib import contextmanager from maya import cmds # noqa import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.maya.api import fbx @@ -20,7 +21,7 @@ def renamed(original_name, renamed_name): cmds.rename(renamed_name, original_name) -class ExtractUnrealSkeletalMesh(openpype.api.Extractor): +class ExtractUnrealSkeletalMesh(publish.Extractor): """Extract Unreal Skeletal Mesh as FBX from Maya. """ order = pyblish.api.ExtractorOrder - 0.1 diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py index 69d51f9ff1..44f0615a27 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py @@ -5,7 +5,8 @@ import os from maya import cmds # noqa import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( parent_nodes, maintained_selection @@ -13,7 +14,7 @@ from openpype.hosts.maya.api.lib import ( from openpype.hosts.maya.api import fbx -class ExtractUnrealStaticMesh(openpype.api.Extractor): +class ExtractUnrealStaticMesh(publish.Extractor): """Extract Unreal Static Mesh as FBX from Maya. 
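The closing thumbnail hunk above only has to repoint `stagingDir` at the temporary directory; the representation is otherwise unchanged. For readers new to the publish pipeline, this is roughly what that representation looks like (values are examples, not taken from the diff):

```python
# Illustrative thumbnail representation (example values); the
# "thumbnail": True flag is what marks it for thumbnail handling
# during integration.
representation = {
    "name": "thumbnail",
    "ext": "jpg",
    "files": "shot010_thumbnail.jpg",         # single file, not a list
    "stagingDir": "/tmp/pyblish_tmp_x1y2z3",  # the mkdtemp directory
    "thumbnail": True,
}
instance.data["representations"].append(representation)
```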
""" order = pyblish.api.ExtractorOrder - 0.1 diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py b/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py index 562ca078e1..38bf02245a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractVRayProxy(openpype.api.Extractor): +class ExtractVRayProxy(publish.Extractor): """Extract the content of the instance to a vrmesh file Things to pay attention to: diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py index 5d41697e5f..8442df1611 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py @@ -3,14 +3,14 @@ import os import re -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.render_setup_tools import export_in_rs_layer from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds -class ExtractVrayscene(openpype.api.Extractor): +class ExtractVrayscene(publish.Extractor): """Extractor for vrscene.""" label = "VRay Scene (.vrscene)" diff --git a/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py b/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py index 5728682abe..77350f343e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py +++ b/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py @@ -2,14 +2,14 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( suspended_refresh, maintained_selection ) -class ExtractXgenCache(openpype.api.Extractor): +class ExtractXgenCache(publish.Extractor): """Produce an alembic of just xgen interactive groom """ diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py b/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py index cf6db00e9a..b61f599cab 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py @@ -3,10 +3,10 @@ import json from maya import cmds -import openpype.api +from openpype.pipeline import publish -class ExtractYetiCache(openpype.api.Extractor): +class ExtractYetiCache(publish.Extractor): """Producing Yeti cache files using scene time range. This will extract Yeti cache file sequence and fur settings. 
diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py index 6e21bffa4e..1d0c5e88c3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py @@ -7,7 +7,7 @@ import contextlib from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib @@ -90,7 +90,7 @@ def yetigraph_attribute_values(assumed_destination, resources): pass -class ExtractYetiRig(openpype.api.Extractor): +class ExtractYetiRig(publish.Extractor): """Extract the Yeti rig to a Maya Scene and write the Yeti rig data.""" label = "Extract Yeti Rig" diff --git a/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py index f9cfac3eb9..b5d5847e9f 100644 --- a/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py +++ b/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py @@ -16,12 +16,11 @@ class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): def process(self, context): - import os from maya import cmds from openpype.lib import version_up - from openpype.action import get_errored_plugins_from_data + from openpype.pipeline.publish import get_errored_plugins_from_context - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) if any(plugin.__name__ == "MayaSubmitDeadline" for plugin in errored_plugins): raise RuntimeError("Skipping incrementing current file because " diff --git a/openpype/hosts/maya/plugins/publish/save_scene.py b/openpype/hosts/maya/plugins/publish/save_scene.py index 50a2f2112a..45e62e7b44 100644 --- a/openpype/hosts/maya/plugins/publish/save_scene.py +++ b/openpype/hosts/maya/plugins/publish/save_scene.py @@ -1,4 +1,8 @@ import pyblish.api +from openpype.pipeline.workfile.lock_workfile import ( + is_workfile_lock_enabled, + remove_workfile_lock +) class SaveCurrentScene(pyblish.api.ContextPlugin): @@ -22,6 +26,10 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): self.log.debug("Skipping file save as there " "are no modifications..") return - + project_name = context.data["projectName"] + project_settings = context.data["project_settings"] + # remove lockfile before saving + if is_workfile_lock_enabled("maya", project_name, project_settings): + remove_workfile_lock(current) self.log.info("Saving current file..") cmds.file(save=True, force=True) diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index c4250a20bd..1a6463fb9d 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -11,7 +11,7 @@ import pyblish.api from openpype.lib import requests_post from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io -from openpype.api import get_system_settings +from openpype.settings import get_system_settings # mapping between Maya renderer names and Muster template ids @@ -118,7 +118,7 @@ def preview_fname(folder, scene, layer, padding, ext): """ # Following hardcoded "/_/" - output = "maya/{scene}/{layer}/{layer}.{number}.{ext}".format( + output = "{scene}/{layer}/{layer}.{number}.{ext}".format( scene=scene, layer=layer, number="#" * padding, diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_content.py 
b/openpype/hosts/maya/plugins/publish/validate_animation_content.py index 7638c44b87..9dbb09a046 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_content.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_content.py @@ -1,6 +1,6 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAnimationContent(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidateAnimationContent(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["animation"] label = "Animation Content" diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 05d63f1d56..649913fff6 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -1,9 +1,12 @@ import maya.cmds as cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): @@ -16,13 +19,13 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['animation', "pointcache"] hosts = ['maya'] label = 'Animation Out Set Related Node Ids' actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py b/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py index 5fb9bd98b1..ac6ce4d22d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py +++ b/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py @@ -4,18 +4,20 @@ import types import maya.cmds as cmds import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateAssRelativePaths(pyblish.api.InstancePlugin): """Ensure exporting ass file has set relative texture paths""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['ass'] label = "ASS has relative texture paths" - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): # we cannot ask this until user open render settings as diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py b/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py index a9ea5a6d15..229da63c42 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py index dca59b147b..3f2c59b95b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py @@ -1,9 +1,9 @@ 
import pyblish.api -import openpype.api from maya import cmds import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): @@ -29,7 +29,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): label = "Assembly Model Transforms" families = ["assembly"] actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] prompt_message = ("You are about to reset the matrix to the default values." " This can alter the look of your scene. " @@ -47,7 +47,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): from openpype.hosts.maya.api import lib # Get all transforms in the loaded containers - container_roots = cmds.listRelatives(instance.data["hierarchy"], + container_roots = cmds.listRelatives(instance.data["nodesHierarchy"], children=True, type="transform", fullPath=True) diff --git a/openpype/hosts/maya/plugins/publish/validate_attributes.py b/openpype/hosts/maya/plugins/publish/validate_attributes.py index e2a22f80b6..136c38bc1d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_attributes.py @@ -1,7 +1,10 @@ import pymel.core as pm import pyblish.api -import openpype.api +from openpype.pipeline.publish import ( + RepairContextAction, + ValidateContentsOrder, +) class ValidateAttributes(pyblish.api.ContextPlugin): @@ -16,10 +19,10 @@ class ValidateAttributes(pyblish.api.ContextPlugin): } """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Attributes" hosts = ["maya"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] optional = True attributes = None diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py b/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py index e019788aff..bd1529e252 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraAttributes(pyblish.api.InstancePlugin): @@ -14,7 +14,7 @@ class ValidateCameraAttributes(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['camera'] hosts = ['maya'] label = 'Camera Attributes' diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py index 5f6faddbe7..1ce8026fc2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraContents(pyblish.api.InstancePlugin): @@ -15,7 +15,7 @@ class ValidateCameraContents(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['camera'] hosts = ['maya'] label = 'Camera Contents' diff --git a/openpype/hosts/maya/plugins/publish/validate_color_sets.py b/openpype/hosts/maya/plugins/publish/validate_color_sets.py index 45224b0672..905417bafa 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_color_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_color_sets.py @@ -1,8 +1,11 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateColorSets(pyblish.api.Validator): @@ -13,13 +16,13 @@ class ValidateColorSets(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' label = 'Mesh ColorSets' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] optional = True @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py b/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py index 3c3ea68fc6..f072e5e323 100644 --- a/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py +++ b/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py @@ -1,7 +1,7 @@ import pyblish.api from maya import cmds -from openpype.plugin import contextplugin_should_run +from openpype.pipeline.publish import context_plugin_should_run class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin): def process(self, context): # Workaround bug pyblish-base#250 - if not contextplugin_should_run(self, context): + if not context_plugin_should_run(self, context): return layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) diff --git a/openpype/hosts/maya/plugins/publish/validate_cycle_error.py b/openpype/hosts/maya/plugins/publish/validate_cycle_error.py index 4dfe0b8add..210ee4127c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_cycle_error.py +++ b/openpype/hosts/maya/plugins/publish/validate_cycle_error.py @@ -2,15 +2,15 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCycleError(pyblish.api.InstancePlugin): """Validate nodes produce no cycle errors.""" - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Cycle Errors" hosts = ["maya"] families = ["rig"] diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index c51766379e..b467a7c232 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -1,7 +1,10 @@ import pyblish.api -import openpype.api from maya import cmds +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -18,7 +21,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): """ label = "Validate Frame Range" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["animation", "pointcache", "camera", @@ -26,7 +29,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): "review", "yeticache"] optional = True - actions = [openpype.api.RepairAction] + actions = [RepairAction] exclude_families = [] def process(self, instance): diff --git 
a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index e04a26e4fd..4870f27bff 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -1,12 +1,12 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateInstanceHasMembers(pyblish.api.InstancePlugin): """Validates instance objectSet has *any* members.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] label = 'Instance has members' actions = [openpype.hosts.maya.api.action.SelectInvalidAction] diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py b/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py index 7b8c335062..41bb414829 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py @@ -3,7 +3,7 @@ from __future__ import absolute_import import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder from maya import cmds @@ -98,7 +98,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin): Action on this validator will select invalid instances in Outliner. """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Instance in same Context" optional = True hosts = ["maya"] diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_subset.py b/openpype/hosts/maya/plugins/publish/validate_instance_subset.py index 539f3f9d3c..bb3dde761c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_subset.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_subset.py @@ -1,8 +1,8 @@ import pyblish.api -import openpype.api import string import six +from openpype.pipeline.publish import ValidateContentsOrder # Allow only characters, numbers and underscore allowed = set(string.ascii_lowercase + @@ -18,7 +18,7 @@ def validate_name(subset): class ValidateSubsetName(pyblish.api.InstancePlugin): """Validates subset name has only valid characters""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["*"] label = "Subset Name" diff --git a/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py b/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py index 9306d8ce15..624074aaf9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py +++ b/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py @@ -1,7 +1,8 @@ +import os import pyblish.api import maya.cmds as cmds -import openpype.api -import os + +from openpype.pipeline.publish import RepairContextAction class ValidateLoadedPlugin(pyblish.api.ContextPlugin): @@ -10,7 +11,7 @@ class ValidateLoadedPlugin(pyblish.api.ContextPlugin): label = "Loaded Plugin" order = pyblish.api.ValidatorOrder host = ["maya"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] @classmethod def get_invalid(cls, context): diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index b1e1d5416b..53501d11e5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -1,6 +1,6 @@ import 
pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookContents(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateLookContents(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Data Contents' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py b/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py index 262dd10b74..20f561a892 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Default Shader Connections' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py index 9d074f927b..a266a0fd74 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py @@ -2,8 +2,11 @@ from collections import defaultdict from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): @@ -16,12 +19,12 @@ class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Id Reference Edits' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] def process(self, instance): invalid = self.get_invalid(instance) diff --git a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py index 2367602d05..f81e511ff3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py @@ -1,8 +1,8 @@ from collections import defaultdict import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidatePipelineOrder class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin): @@ -20,7 +20,7 @@ class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Look members unique' hosts = ['maya'] families = ['look'] diff --git a/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py b/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py index 8ba6cde988..db6aadae8d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py @@ -1,8 +1,8 @@ from maya import cmds import 
pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin): @@ -23,7 +23,7 @@ class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.01 + order = ValidateContentsOrder + 0.01 families = ['look'] hosts = ['maya'] label = 'Look No Default Shaders' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_sets.py b/openpype/hosts/maya/plugins/publish/validate_look_sets.py index 5e737ca876..8434ddde04 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_sets.py @@ -1,8 +1,7 @@ +import pyblish.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib - -import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookSets(pyblish.api.InstancePlugin): @@ -38,7 +37,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Sets' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py index e8affac036..9b57b06ee7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -1,8 +1,11 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateShadingEngine(pyblish.api.InstancePlugin): @@ -11,12 +14,12 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin): Shading engines should be named "{surface_shader}SG" """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["look"] hosts = ["maya"] label = "Look Shading Engine Naming" actions = [ - openpype.hosts.maya.api.action.SelectInvalidAction, openpype.api.RepairAction + openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction ] # The default connections to check diff --git a/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py b/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py index 2b32ccf492..788e440d12 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSingleShader(pyblish.api.InstancePlugin): @@ -12,7 +12,7 @@ class ValidateSingleShader(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Single Shader Per Shape' diff --git a/openpype/hosts/maya/plugins/publish/validate_maya_units.py b/openpype/hosts/maya/plugins/publish/validate_maya_units.py index 5f67adec76..5698d795ff 100644 --- a/openpype/hosts/maya/plugins/publish/validate_maya_units.py +++ b/openpype/hosts/maya/plugins/publish/validate_maya_units.py @@ -1,10 +1,14 @@ import maya.cmds as cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.lib as mayalib from openpype.pipeline.context_tools 
import get_current_project_asset from math import ceil +from openpype.pipeline.publish import ( + RepairContextAction, + ValidateSceneOrder, +) def float_round(num, places=0, direction=ceil): @@ -14,10 +18,10 @@ def float_round(num, places=0, direction=ceil): class ValidateMayaUnits(pyblish.api.ContextPlugin): """Check if the Maya units are set correct""" - order = openpype.api.ValidateSceneOrder + order = ValidateSceneOrder label = "Maya Units" hosts = ['maya'] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] validate_linear_units = True linear_units = "cm" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py index 90eb01aa12..c1c0636b9e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py @@ -1,9 +1,12 @@ import pymel.core as pc from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin): @@ -13,14 +16,14 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin): later published looks can discover non-default Arnold attributes. """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ["maya"] families = ["model"] category = "geometry" label = "Mesh Arnold Attributes" actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] optional = True if cmds.getAttr( diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py index 8f9b5d1c4e..36a0da7a59 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py @@ -3,8 +3,8 @@ import re from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder def len_flattened(components): @@ -45,7 +45,7 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin): UVs for every face. 
""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py b/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py index 8fa1f3cf3b..4427c6eece 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin): @@ -12,7 +12,7 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py b/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py index ab0beb2a9c..5b67db3307 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py @@ -1,9 +1,9 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateContentsOrder class ValidateMeshNgons(pyblish.api.Validator): @@ -16,7 +16,7 @@ class ValidateMeshNgons(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Mesh ngons" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py index 5ccfa7377a..664e2b5772 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshNoNegativeScale(pyblish.api.Validator): @@ -17,7 +17,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = 'Mesh No Negative Scale' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py index 9bd584bbbf..d7711da722 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshNonManifold(pyblish.api.Validator): @@ -13,7 +13,7 @@ class ValidateMeshNonManifold(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = 'Mesh Non-Manifold Vertices/Edges' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py index 5e6f24cf79..0ef2716559 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py +++ 
b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py @@ -1,9 +1,9 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder families = ['model'] hosts = ['maya'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index 750932df54..c8892a8e59 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -2,8 +2,11 @@ from maya import cmds import maya.api.OpenMaya as om2 import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshNormalsUnlocked(pyblish.api.Validator): @@ -14,14 +17,14 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' version = (0, 1, 0) label = 'Mesh Normals Unlocked' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] optional = True @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py index bf95d8ba09..be7324a68f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py @@ -1,11 +1,11 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action import math import maya.api.OpenMaya as om import pymel.core as pm from six.moves import xrange +from openpype.pipeline.publish import ValidateMeshOrder class GetOverlappingUVs(object): @@ -232,7 +232,7 @@ class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin): It is optional to warn publisher about it. 
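Every validator hunk in this stretch converges on the same two-line change: order constants and repair actions now come from `openpype.pipeline.publish` rather than `openpype.api`. The resulting plugin shape, with a hypothetical `ValidateExample` (both `get_invalid` and `repair` are the hooks the two bundled actions rely on):

```python
# The validator shape these hunks converge on ("ValidateExample" and
# its check are made up; the imports match the hunks above).
import pyblish.api

import openpype.hosts.maya.api.action
from openpype.pipeline.publish import (
    RepairAction,
    ValidateContentsOrder,
)


class ValidateExample(pyblish.api.InstancePlugin):
    """Fail when get_invalid() reports nodes."""

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["model"]
    label = "Example Validator"
    actions = [
        openpype.hosts.maya.api.action.SelectInvalidAction,
        RepairAction,
    ]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid nodes: {}".format(invalid))

    @classmethod
    def get_invalid(cls, instance):
        return []  # SelectInvalidAction selects whatever is returned here

    @classmethod
    def repair(cls, instance):
        pass  # RepairAction calls this for each failed instance
```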
""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py index e0835000f0..2a0abe975c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py @@ -1,8 +1,11 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) def pairs(iterable): @@ -86,12 +89,12 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = "Mesh Shader Connections" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] def process(self, instance): """Process all the nodes in the instance 'objectSet'""" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py index 9d2aeb7d99..6ca8c06ba5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py @@ -1,9 +1,12 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): @@ -15,7 +18,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model', 'pointcache'] category = 'uv' @@ -23,7 +26,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): version = (0, 1, 0) label = "Mesh Single UV Set" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index 52c45d3b0c..40ddb916ca 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -1,8 +1,11 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin): @@ -15,13 +18,13 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] optional = True label = "Mesh has map1 UV Set" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py index 463c3c4c50..1e6d290ae7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py +++ 
b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -3,8 +3,11 @@ import re from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) def len_flattened(components): @@ -57,13 +60,13 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' label = 'Mesh Vertices Have Edges' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @classmethod def repair(cls, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_model_content.py b/openpype/hosts/maya/plugins/publish/validate_model_content.py index aee0ea52f0..723346a285 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_content.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_content.py @@ -1,9 +1,9 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateContentsOrder class ValidateModelContent(pyblish.api.InstancePlugin): @@ -14,7 +14,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Model Content" diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 02107d5732..2dec9ba267 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -5,8 +5,8 @@ import re from maya import cmds import pyblish.api -import openpype.api from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api.shader_definition_editor import ( DEFINITION_FILENAME) @@ -23,7 +23,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): """ optional = True - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Model Name" diff --git a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py index 6dc7bd3bc4..c31ccf405c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py +++ b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py @@ -5,8 +5,10 @@ import appdirs import pyblish.api from openpype.lib import requests_get -from openpype.plugin import contextplugin_should_run -import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + context_plugin_should_run, + RepairAction, +) class ValidateMusterConnection(pyblish.api.ContextPlugin): @@ -21,12 +23,12 @@ class ValidateMusterConnection(pyblish.api.ContextPlugin): token = None if not os.environ.get("MUSTER_REST_URL"): active = False - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, context): # Workaround bug pyblish-base#250 - if not contextplugin_should_run(self, context): + if not context_plugin_should_run(self, context): return # test if we have environment set (redundant as this plugin shouldn' diff --git a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py 
b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py index bac2c030c8..67fc1616c2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py @@ -1,15 +1,15 @@ -import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action - import os +import pyblish.api +import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder + COLOUR_SPACES = ['sRGB', 'linear', 'auto'] MIPMAP_EXTENSIONS = ['tdl'] class ValidateMvLookContents(pyblish.api.InstancePlugin): - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['mvLook'] hosts = ['maya'] label = 'Validate mvLook Data' diff --git a/openpype/hosts/maya/plugins/publish/validate_no_animation.py b/openpype/hosts/maya/plugins/publish/validate_no_animation.py index 6621e452f0..2e7cafe4ab 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_animation.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_animation.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoAnimation(pyblish.api.Validator): @@ -14,7 +14,7 @@ class ValidateNoAnimation(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "No Animation" hosts = ["maya"] families = ["model"] diff --git a/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py b/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py index c3f6f3c38e..1a5773e6a7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): @@ -13,7 +13,7 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): settings when being loaded and sometimes being skipped. 
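Context-level plugins get the same treatment: `contextplugin_should_run` is renamed to `context_plugin_should_run` and moves into `openpype.pipeline.publish`, alongside `RepairContextAction` (see the Maya-units and Muster hunks above). A sketch of a context plugin using both; the units check itself is made up:

```python
# Context-plugin variant (made-up units check). The helper guards
# against pyblish-base issue #250, where a ContextPlugin with families
# runs even when no active instance matches them.
import maya.cmds as cmds
import pyblish.api

from openpype.pipeline.publish import (
    context_plugin_should_run,
    RepairContextAction,
    ValidateSceneOrder,
)


class ValidateExampleUnits(pyblish.api.ContextPlugin):
    order = ValidateSceneOrder
    hosts = ["maya"]
    families = ["renderlayer"]
    label = "Example Units"
    actions = [RepairContextAction]

    def process(self, context):
        # Workaround bug pyblish-base#250, as in the hunks above.
        if not context_plugin_should_run(self, context):
            return
        if cmds.currentUnit(query=True, linear=True) != "cm":
            raise RuntimeError("Linear units must be centimeters")

    @classmethod
    def repair(cls, context):
        # RepairContextAction invokes repair() with the context.
        cmds.currentUnit(linear="cm")
```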
""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['camera'] version = (0, 1, 0) diff --git a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py index 5b3d6bc9c4..01c77e5b2e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py @@ -2,7 +2,11 @@ import pymel.core as pm import maya.cmds as cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) + import openpype.hosts.maya.api.action @@ -16,14 +20,14 @@ def get_namespace(node_name): class ValidateNoNamespace(pyblish.api.InstancePlugin): """Ensure the nodes don't have a namespace""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' version = (0, 1, 0) label = 'No Namespaces' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py index 36d61b03e8..b430c2b63c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -1,8 +1,11 @@ import maya.cmds as cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) def has_shape_children(node): @@ -37,13 +40,13 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' version = (0, 1, 0) label = 'No Empty/Null Transforms' - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py b/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py index d140a1f24a..2cfdc28128 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoUnknownNodes(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateNoUnknownNodes(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model', 'rig'] optional = True diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_node_ids.py index d17d34117f..796f4c8d76 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidatePipelineOrder +import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -14,7 +14,7 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + 
order = ValidatePipelineOrder label = 'Instance Nodes Have ID' hosts = ['maya'] families = ["model", diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py index 0324be9fc9..68c47f3a96 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -1,9 +1,12 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): @@ -16,13 +19,13 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Deformed shape ids' actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py index 632b531668..b2f28fd4e5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -1,8 +1,8 @@ import pyblish.api -import openpype.api from openpype.client import get_assets from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -18,7 +18,7 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Node Ids in Database' hosts = ['maya'] families = ["*"] diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py index c8bac6e569..f901dc58c4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,6 +1,6 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -10,7 +10,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Node Ids Related (ID)' hosts = ['maya'] families = ["model", diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py index ed9ef526d6..f7a5e6e292 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py @@ -1,7 +1,7 @@ from collections import defaultdict import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -12,7 +12,7 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): Here we ensure that what has been added to the instance is unique """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Non Duplicate Instance Members (ID)' hosts = ['maya'] 
families = ["model", diff --git a/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py b/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py index 38f3ab1e68..0f608dab2c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py @@ -1,8 +1,9 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model', 'rig'] label = "No Ghosting" diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 4d3796e429..78bb022785 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,7 +1,10 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRenderImageRule(pyblish.api.InstancePlugin): @@ -13,11 +16,11 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Images File Rule (Workspace)" hosts = ["maya"] families = ["renderlayer"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py b/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py index 044cc7c6a2..67ece75af8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py @@ -1,14 +1,15 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin): """Ensure no default (startup) cameras are to be rendered.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['renderlayer'] label = "No Default Cameras Renderable" diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index 35b87fd0ab..77322fefd5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -3,9 +3,9 @@ import re import pyblish.api from maya import cmds -import openpype.api import openpype.hosts.maya.api.action -from openpype.hosts.maya.api.render_settings import RenderSettings +from openpype.hosts.maya.api.lib_rendersettings import RenderSettings +from openpype.pipeline.publish import ValidateContentsOrder class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): @@ -15,7 +15,7 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): prefix must contain token. 
""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Render Single Camera" hosts = ['maya'] families = ["renderlayer", diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index f19c0bff36..94e2633593 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -6,17 +6,26 @@ from collections import OrderedDict from maya import cmds, mel import pyblish.api -import openpype.api +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) from openpype.hosts.maya.api import lib +def get_redshift_image_format_labels(): + """Return nice labels for Redshift image formats.""" + var = "$g_redshiftImageFormatLabels" + return mel.eval("{0}={0}".format(var)) + + class ValidateRenderSettings(pyblish.api.InstancePlugin): """Validates the global render settings - * File Name Prefix must start with: `maya/` + * File Name Prefix must start with: `` all other token are customizable but sane values for Arnold are: - `maya///_` + `//_` token is supported also, useful for multiple renderable cameras per render layer. @@ -39,11 +48,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Render Settings" hosts = ["maya"] families = ["renderlayer"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] ImagePrefixes = { 'mentalray': 'defaultRenderGlobals.imageFilePrefix', @@ -55,12 +64,12 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): } ImagePrefixTokens = { - 'mentalray': 'maya///{aov_separator}', # noqa: E501 - 'arnold': 'maya///{aov_separator}', # noqa: E501 - 'redshift': 'maya///', - 'vray': 'maya///', + 'mentalray': '//{aov_separator}', # noqa: E501 + 'arnold': '//{aov_separator}', # noqa: E501 + 'redshift': '//', + 'vray': '//', 'renderman': '{aov_separator}..', - 'mayahardware2': 'maya///', + 'mayahardware2': '//', } _aov_chars = { @@ -71,7 +80,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): redshift_AOV_prefix = "/{aov_separator}" # noqa: E501 - renderman_dir_prefix = "maya//" + renderman_dir_prefix = "/" R_AOV_TOKEN = re.compile( r'%a||', re.IGNORECASE) @@ -81,8 +90,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): R_SCENE_TOKEN = re.compile(r'%s|', re.IGNORECASE) DEFAULT_PADDING = 4 - VRAY_PREFIX = "maya///" - DEFAULT_PREFIX = "maya///_" + VRAY_PREFIX = "//" + DEFAULT_PREFIX = "//_" def process(self, instance): @@ -102,8 +111,9 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): # Get the node attributes for current renderer attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS['default']) + # Prefix attribute can return None when a value was never set prefix = lib.get_attr_in_layer(cls.ImagePrefixes[renderer], - layer=layer) + layer=layer) or "" padding = lib.get_attr_in_layer("{node}.{padding}".format(**attrs), layer=layer) @@ -113,7 +123,6 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): prefix = prefix.replace( "{aov_separator}", instance.data.get("aovSeparator", "_")) - required_prefix = "maya/" default_prefix = cls.ImagePrefixTokens[renderer] if not anim_override: @@ -121,15 +130,6 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): cls.log.error("Animation needs to be enabled. 
Use the same " "frame for start and end to render single frame") - if renderer != "renderman" and not prefix.lower().startswith( - required_prefix): - invalid = True - cls.log.error( - ("Wrong image prefix [ {} ] " - " - doesn't start with: '{}'").format( - prefix, required_prefix) - ) - if not re.search(cls.R_LAYER_TOKEN, prefix): invalid = True cls.log.error("Wrong image prefix [ {} ] - " @@ -180,18 +180,22 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): redshift_AOV_prefix )) invalid = True - # get aov format - aov_ext = cmds.getAttr( - "{}.fileFormat".format(aov), asString=True) - default_ext = cmds.getAttr( - "redshiftOptions.imageFormat", asString=True) + # check aov file format + aov_ext = cmds.getAttr("{}.fileFormat".format(aov)) + default_ext = cmds.getAttr("redshiftOptions.imageFormat") + aov_type = cmds.getAttr("{}.aovType".format(aov)) + if aov_type == "Cryptomatte": + # redshift Cryptomatte AOV always uses "Cryptomatte (EXR)" + # so we ignore validating file format for it. + pass - if default_ext != aov_ext: - cls.log.error(("AOV file format is not the same " - "as the one set globally " - "{} != {}").format(default_ext, - aov_ext)) + elif default_ext != aov_ext: + labels = get_redshift_image_format_labels() + cls.log.error( + "AOV file format {} does not match global file format " + "{}".format(labels[aov_ext], labels[default_ext]) + ) invalid = True if renderer == "renderman": @@ -254,14 +258,20 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): # go through definitions and test if such node.attribute exists. # if so, compare its value from the one required. for attr, value in OrderedDict(validation_settings).items(): - # first get node of that type cls.log.debug("{}: {}".format(attr, value)) - node_type = attr.split(".")[0] - attribute_name = ".".join(attr.split(".")[1:]) + if "." 
not in attr: + cls.log.warning("Skipping invalid attribute defined in " + "validation settings: '{}'".format(attr)) + continue + + node_type, attribute_name = attr.split(".", 1) + + # first get node of that type nodes = cmds.ls(type=node_type) - if not isinstance(nodes, list): - cls.log.warning("No nodes of '{}' found.".format(node_type)) + if not nodes: + cls.log.warning( + "No nodes of type '{}' found.".format(node_type)) continue for node in nodes: @@ -299,6 +309,9 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): default = lib.RENDER_ATTRS['default'] render_attrs = lib.RENDER_ATTRS.get(renderer, default) + # Repair animation must be enabled + cmds.setAttr("defaultRenderGlobals.animation", True) + # Repair prefix if renderer != "renderman": node = render_attrs["node"] @@ -331,8 +344,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): cmds.optionMenuGrp("vrayRenderElementSeparator", v=instance.data.get("aovSeparator", "_")) cmds.setAttr( - "{}.fileNameRenderElementSeparator".format( - node), + "{}.fileNameRenderElementSeparator".format(node), instance.data.get("aovSeparator", "_"), type="string" ) diff --git a/openpype/hosts/maya/plugins/publish/validate_resources.py b/openpype/hosts/maya/plugins/publish/validate_resources.py index 08f0f5467c..b7bd47ad0a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_resources.py +++ b/openpype/hosts/maya/plugins/publish/validate_resources.py @@ -2,7 +2,7 @@ import os from collections import defaultdict import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateResources(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateResources(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Resources Unique" def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py index 04cc9ab5fb..361c594013 100644 --- a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py +++ b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py @@ -1,14 +1,16 @@ # -*- coding: utf-8 -*- import collections import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin): """Validates that review subset has unique name.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["review"] label = "Validate Review Subset Unique" diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_contents.py b/openpype/hosts/maya/plugins/publish/validate_rig_contents.py index 6fe51d7b51..1096c95486 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_contents.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateRigContents(pyblish.api.InstancePlugin): @@ -13,7 +13,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Rig Contents" hosts = ["maya"] families = ["rig"] diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py 
b/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py index d5a1fd3529..1e42abdcd9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py @@ -2,7 +2,10 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import undo_chunk @@ -25,11 +28,11 @@ class ValidateRigControllers(pyblish.api.InstancePlugin): - Break all incoming connections to keyable attributes """ - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Rig Controllers" hosts = ["maya"] families = ["rig"] - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] # Default controller values diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py index 1f1db9156b..55b2ebd6d8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py @@ -1,8 +1,11 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) from openpype.hosts.maya.api import lib import openpype.hosts.maya.api.action @@ -26,11 +29,11 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): This validator will ensure they are hidden or unkeyable attributes. """ - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Rig Controllers (Arnold Attributes)" hosts = ["maya"] families = ["rig"] - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] attributes = [ diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py index 5df754fff4..d5bf7fd1cf 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py @@ -1,9 +1,13 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRigJointsHidden(pyblish.api.InstancePlugin): @@ -17,13 +21,13 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['rig'] version = (0, 1, 0) label = "Joints Hidden" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index cc3723a6e1..03ba381f8d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -1,9 +1,13 @@ import maya.cmds as cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.hosts.maya.api import 
lib +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): @@ -16,13 +20,13 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["rig"] hosts = ['maya'] label = 'Rig Out Set Node Ids' actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] allow_history_only = False diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py index 7c5c540c60..f3ed1a36ef 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py @@ -2,8 +2,11 @@ import pymel.core as pc import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRigOutputIds(pyblish.api.InstancePlugin): @@ -13,11 +16,11 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin): to ensure the id from the model is preserved through animation. """ - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Rig Output Ids" hosts = ["maya"] families = ["rig"] - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py b/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py index 174bc44a6f..ec2bea220d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py +++ b/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py @@ -3,7 +3,8 @@ import os import maya.cmds as cmds import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidatePipelineOrder def is_subdir(path, root_dir): @@ -28,7 +29,7 @@ def is_subdir(path, root_dir): class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin): """Validate the scene is inside the currently set Maya workspace""" - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder hosts = ['maya'] category = 'scene' version = (0, 1, 0) diff --git a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py index 8e23a7c04f..5fd971f8c4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py +++ b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py @@ -1,12 +1,11 @@ - import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSetdressRoot(pyblish.api.InstancePlugin): """Validate if set dress top root node is published.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "SetDress Root" hosts = ["maya"] families = ["setdress"] diff --git a/openpype/hosts/maya/plugins/publish/validate_shader_name.py b/openpype/hosts/maya/plugins/publish/validate_shader_name.py index 24111f0ad4..b3e51f011d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shader_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_shader_name.py @@ -1,9 +1,10 @@ +import re from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action -import re +from openpype.pipeline.publish import ValidateContentsOrder 
class ValidateShaderName(pyblish.api.InstancePlugin): @@ -13,7 +14,7 @@ class ValidateShaderName(pyblish.api.InstancePlugin): """ optional = True - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["look"] hosts = ['maya'] label = 'Validate Shaders Name' diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py index e08e06b50e..651c6bcec9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py @@ -3,8 +3,12 @@ import re from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) def short_name(node): @@ -31,7 +35,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' @@ -39,7 +43,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin): version = (0, 1, 0) label = "Shape Default Naming" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def _define_default_name(shape): diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py index 714451bb98..f58c0aaf81 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -1,20 +1,23 @@ import pyblish.api -import openpype.api from maya import cmds import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateShapeRenderStats(pyblish.api.Validator): """Ensure all render stats are set to the default values.""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = 'Shape Default Render Stats' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] defaults = {'castsShadows': 1, 'receiveShadows': 1, diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py index 343eaccb7d..7a7e9a0aee 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py @@ -1,9 +1,13 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) class ValidateShapeZero(pyblish.api.Validator): @@ -13,13 +17,13 @@ class ValidateShapeZero(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Shape Zero (Freeze)" actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_single_assembly.py b/openpype/hosts/maya/plugins/publish/validate_single_assembly.py index 9fb3a47e6d..8771ca58d1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_single_assembly.py +++ 
b/openpype/hosts/maya/plugins/publish/validate_single_assembly.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSingleAssembly(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateSingleAssembly(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['rig', 'animation'] label = 'Single Assembly' diff --git a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py index 54a86d27cf..8221c18b17 100644 --- a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py +++ b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py @@ -1,7 +1,10 @@ # -*- coding: utf-8 -*- import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) from maya import cmds @@ -9,7 +12,7 @@ from maya import cmds class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin): """Validates that nodes has common root.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["skeletalMesh"] label = "Skeletal Mesh Top Node" diff --git a/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py b/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py index 8c804786f3..b45d2b120a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py @@ -1,8 +1,9 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin): @@ -14,7 +15,7 @@ class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['fbx'] label = "Skincluster Deformer Relationships" diff --git a/openpype/hosts/maya/plugins/publish/validate_step_size.py b/openpype/hosts/maya/plugins/publish/validate_step_size.py index 172ac5f26e..294458f63c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_step_size.py +++ b/openpype/hosts/maya/plugins/publish/validate_step_size.py @@ -1,6 +1,7 @@ import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateStepSize(pyblish.api.InstancePlugin): @@ -10,7 +11,7 @@ class ValidateStepSize(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = 'Step size' families = ['camera', 'pointcache', diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py index 6f5ff24b9c..4615e2ec07 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py @@ -3,8 +3,9 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): @@ -27,7 +28,7 @@ 
class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_zero.py b/openpype/hosts/maya/plugins/publish/validate_transform_zero.py index fdd09658d1..da569195e8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_zero.py @@ -1,8 +1,9 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateTransformZero(pyblish.api.Validator): @@ -14,7 +15,7 @@ class ValidateTransformZero(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] category = "geometry" diff --git a/openpype/plugins/publish/validate_unique_names.py b/openpype/hosts/maya/plugins/publish/validate_unique_names.py similarity index 86% rename from openpype/plugins/publish/validate_unique_names.py rename to openpype/hosts/maya/plugins/publish/validate_unique_names.py index 459c90e6c1..05776ee0f3 100644 --- a/openpype/plugins/publish/validate_unique_names.py +++ b/openpype/hosts/maya/plugins/publish/validate_unique_names.py @@ -1,8 +1,8 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateUniqueNames(pyblish.api.Validator): @@ -12,7 +12,7 @@ class ValidateUniqueNames(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Unique transform name" @@ -23,7 +23,7 @@ class ValidateUniqueNames(pyblish.api.Validator): """Returns the invalid transforms in the instance. Returns: - list: Non unique name transforms + list: Non-unique name transforms. 
""" diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py index c05121a1b0..4211e76a73 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -2,13 +2,15 @@ from maya import cmds import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidateMeshOrder +import openpype.hosts.maya.api.action class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin): """Validate if mesh is made of triangles for Unreal Engine""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ["maya"] families = ["staticMesh"] category = "geometry" diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index 33788d1835..1425190b82 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -3,10 +3,11 @@ import re import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline import legacy_io -from openpype.api import get_project_settings +from openpype.settings import get_project_settings +from openpype.pipeline.publish import ValidateContentsOrder class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): @@ -50,7 +51,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): """ optional = True - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["staticMesh"] label = "Unreal Static Mesh Name" diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py b/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py index 5e1b04889f..dd699735d9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py @@ -2,7 +2,11 @@ from maya import cmds import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) class ValidateUnrealUpAxis(pyblish.api.ContextPlugin): @@ -10,11 +14,11 @@ class ValidateUnrealUpAxis(pyblish.api.ContextPlugin): optional = True active = False - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["staticMesh"] label = "Unreal Up-Axis check" - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, context): assert cmds.upAxis(q=True, axis=True) == "z", ( diff --git a/openpype/hosts/maya/plugins/publish/validate_visible_only.py b/openpype/hosts/maya/plugins/publish/validate_visible_only.py index 59a7f976ab..faf634f258 100644 --- a/openpype/hosts/maya/plugins/publish/validate_visible_only.py +++ b/openpype/hosts/maya/plugins/publish/validate_visible_only.py @@ -1,8 +1,8 @@ import pyblish.api -import openpype.api from openpype.hosts.maya.api.lib import iter_visible_nodes_in_range import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): @@ -12,7 +12,7 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): on the instance - otherwise the validation is skipped. 
""" - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Alembic Visible Only" hosts = ["maya"] families = ["pointcache", "animation"] diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py b/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py index 5e35565383..366f3bd10e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py @@ -1,6 +1,9 @@ import pyblish.api -import openpype.api from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) from maya import cmds @@ -15,10 +18,10 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "VRay Distributed Rendering" families = ["renderlayer"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] # V-Ray attribute names enabled_attr = "vraySettings.sys_distributed_rendering_on" diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py b/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py index 7a48c29b7d..39c721e717 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py @@ -4,7 +4,7 @@ import pyblish.api import types from maya import cmds -import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairContextAction class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): @@ -20,7 +20,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): label = 'VRay Referenced AOVs' hosts = ['maya'] families = ['renderlayer'] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, instance): """Plugin main entry point.""" diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py index 1deabde4a2..f49811c2c0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py @@ -1,8 +1,11 @@ # -*- coding: utf-8 -*- """Validate VRay Translator settings.""" import pyblish.api -import openpype.api -from openpype.plugin import contextplugin_should_run +from openpype.pipeline.publish import ( + context_plugin_should_run, + RepairContextAction, + ValidateContentsOrder, +) from maya import cmds @@ -10,15 +13,15 @@ from maya import cmds class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin): """Validate VRay Translator settings for extracting vrscenes.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "VRay Translator Settings" families = ["vrayscene_layer"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, context): """Plugin entry point.""" # Workaround bug pyblish-base#250 - if not contextplugin_should_run(self, context): + if not context_plugin_should_run(self, context): return invalid = self.get_invalid(context) diff --git a/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py b/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py index b94e5cbbed..855a96e6b9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py 
+++ b/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api from maya import cmds diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py b/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py index 79cd09315e..a864a18cee 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): @@ -20,7 +20,7 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Yeti Render Script Callbacks" hosts = ["maya"] families = ["renderlayer"] diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py index 5610733577..4842134b12 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.action import maya.cmds as cmds import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateYetiRigCacheState(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateYetiRigCacheState(pyblish.api.InstancePlugin): label = "Yeti Rig Cache State" hosts = ["maya"] families = ["yetiRig"] - actions = [openpype.action.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py index 651c8da849..ebef44774d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py @@ -1,14 +1,15 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator): """Validate if all input nodes are part of the instance's hierarchy""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["yetiRig"] label = "Yeti Rig Input Shapes In Instance" diff --git a/openpype/hosts/maya/resources/workspace.mel b/openpype/hosts/maya/resources/workspace.mel deleted file mode 100644 index f7213fa4f6..0000000000 --- a/openpype/hosts/maya/resources/workspace.mel +++ /dev/null @@ -1,11 +0,0 @@ -//Maya 2018 Project Definition - -workspace -fr "shaders" "renderData/shaders"; -workspace -fr "alembicCache" "cache/alembic"; -workspace -fr "mayaAscii" ""; -workspace -fr "mayaBinary" ""; -workspace -fr "renderData" "renderData"; -workspace -fr "fileCache" "cache/nCache"; -workspace -fr "scene" ""; -workspace -fr "sourceImages" "sourceimages"; -workspace -fr "images" "renders"; diff --git a/openpype/hosts/maya/startup/userSetup.py b/openpype/hosts/maya/startup/userSetup.py index 10e68c2ddb..40cd51f2d8 100644 --- a/openpype/hosts/maya/startup/userSetup.py +++ b/openpype/hosts/maya/startup/userSetup.py @@ -1,5 
+1,5 @@ import os -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import install_host from openpype.hosts.maya.api import MayaHost from maya import cmds diff --git a/openpype/hosts/nuke/__init__.py b/openpype/hosts/nuke/__init__.py index 134a6621c4..8ab565939b 100644 --- a/openpype/hosts/nuke/__init__.py +++ b/openpype/hosts/nuke/__init__.py @@ -1,41 +1,10 @@ -import os -import platform +from .addon import ( + NUKE_ROOT_DIR, + NukeAddon, +) -def add_implementation_envs(env, _app): - # Add requirements to NUKE_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_nuke_paths = [ - os.path.join(pype_root, "openpype", "hosts", "nuke", "startup") - ] - old_nuke_path = env.get("NUKE_PATH") or "" - for path in old_nuke_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_nuke_paths: - new_nuke_paths.append(norm_path) - - env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "NUKE_ROOT_DIR", + "NukeAddon", +) diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py new file mode 100644 index 0000000000..54e4da5195 --- /dev/null +++ b/openpype/hosts/nuke/addon.py @@ -0,0 +1,63 @@ +import os +import platform +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon +NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class NukeAddon(OpenPypeModule, IHostAddon): + name = "nuke" + host_name = "nuke" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to NUKE_PATH + new_nuke_paths = [ + os.path.join(NUKE_ROOT_DIR, "startup") + ] + old_nuke_path = env.get("NUKE_PATH") or "" + for path in old_nuke_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_nuke_paths: + new_nuke_paths.append(norm_path) + + env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Add vendor to PYTHONPATH + python_path = env["PYTHONPATH"] + python_path_parts = [] + if python_path: + python_path_parts = python_path.split(os.pathsep) + vendor_path = os.path.join(NUKE_ROOT_DIR, "vendor") + python_path_parts.insert(0, vendor_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Set default values if they are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + # Try to add QuickTime to PATH + quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" + # platform.system() returns "Windows", so compare case-insensitively + if platform.system().lower() == "windows" and os.path.exists(quick_time_path): + path_value = env.get("PATH") or "" + path_paths = [ + path + for path in path_value.split(os.pathsep) + if path + ] + path_paths.append(quick_time_path) + env["PATH"] = os.pathsep.join(path_paths) + + def 
get_workfile_extensions(self): + return [".nk"] diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index b126de4ceb..c65058874b 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -21,13 +21,15 @@ from .pipeline import ( containerise, parse_container, update_container, + + get_workfile_build_placeholder_plugins, ) from .lib import ( maintained_selection, reset_selection, get_view_process_node, - duplicate_node - + duplicate_node, + convert_knob_value_to_correct_type ) from .utils import ( @@ -55,10 +57,13 @@ __all__ = ( "parse_container", "update_container", + "get_workfile_build_placeholder_plugins", + "maintained_selection", "reset_selection", "get_view_process_node", "duplicate_node", + "convert_knob_value_to_correct_type", "colorspace_exists_on_node", "get_colorspace_list" diff --git a/openpype/hosts/nuke/api/actions.py b/openpype/hosts/nuke/api/actions.py index c4a6f0fb84..92b83560da 100644 --- a/openpype/hosts/nuke/api/actions.py +++ b/openpype/hosts/nuke/api/actions.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context from .lib import ( reset_selection, select_nodes diff --git a/openpype/hosts/nuke/api/gizmo_menu.py b/openpype/hosts/nuke/api/gizmo_menu.py index 0f1a3e03fc..9edfc62e3b 100644 --- a/openpype/hosts/nuke/api/gizmo_menu.py +++ b/openpype/hosts/nuke/api/gizmo_menu.py @@ -2,7 +2,7 @@ import os import re import nuke -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index a53d932db1..1aea04d889 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -19,17 +19,19 @@ from openpype.client import ( get_last_versions, get_representations, ) -from openpype.api import ( + +from openpype.host import HostDirmap +from openpype.tools.utils import host_tools +from openpype.lib import ( + env_value_to_bool, Logger, get_version_from_path, - get_current_project_settings, ) -from openpype.tools.utils import host_tools -from openpype.lib import env_value_to_bool -from openpype.lib.path_tools import HostDirmap + from openpype.settings import ( get_project_settings, get_anatomy_settings, + get_current_project_settings, ) from openpype.modules import ModulesManager from openpype.pipeline.template_data import get_template_data_with_names @@ -76,6 +78,23 @@ class Context: _project_doc = None +def get_main_window(): + """Acquire Nuke's main window""" + if Context.main_window is None: + from Qt import QtWidgets + + top_widgets = QtWidgets.QApplication.topLevelWidgets() + name = "Foundry::UI::DockMainWindow" + for widget in top_widgets: + if ( + widget.inherits("QMainWindow") + and widget.metaObject().className() == name + ): + Context.main_window = widget + break + return Context.main_window + + class Knobby(object): """For creating knob which it's type isn't mapped in `create_knobs` @@ -544,7 +563,15 @@ def get_node_path(path, padding=4): def get_nuke_imageio_settings(): - return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] + project_imageio = get_project_settings( + Context.project_name)["nuke"]["imageio"] + + # backward compatibility for projects started before 3.10 + # those are still using `__legacy__` knob types + if not project_imageio["enabled"]: + return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] + + return 
get_project_settings(Context.project_name)["nuke"]["imageio"] def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): @@ -1593,28 +1620,35 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): if not knob_value: continue - # first convert string types to string - # just to ditch unicode - if isinstance(knob_value, six.text_type): - knob_value = str(knob_value) - - # set correctly knob types - if knob_type == "bool": - knob_value = bool(knob_value) - elif knob_type == "decimal_number": - knob_value = float(knob_value) - elif knob_type == "number": - knob_value = int(knob_value) - elif knob_type == "text": - knob_value = knob_value - elif knob_type == "color_gui": - knob_value = color_gui_to_int(knob_value) - elif knob_type in ["2d_vector", "3d_vector", "color"]: - knob_value = [float(v) for v in knob_value] + knob_value = convert_knob_value_to_correct_type( + knob_type, knob_value) node[knob_name].setValue(knob_value) +def convert_knob_value_to_correct_type(knob_type, knob_value): + # first convert string types to string + # just to ditch unicode + if isinstance(knob_value, six.text_type): + knob_value = str(knob_value) + + # cast the value to match the knob type + if knob_type == "bool": + knob_value = bool(knob_value) + elif knob_type == "decimal_number": + knob_value = float(knob_value) + elif knob_type == "number": + knob_value = int(knob_value) + elif knob_type == "text": + knob_value = knob_value + elif knob_type == "color_gui": + knob_value = color_gui_to_int(knob_value) + elif knob_type in ["2d_vector", "3d_vector", "color"]: + knob_value = [float(v) for v in knob_value] + + return knob_value + + def color_gui_to_int(color_gui): hex_value = ( "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) @@ -1945,15 +1979,25 @@ class WorkfileSettings(object): if not write_node: return - # write all knobs to node - for knob in nuke_imageio_writes["knobs"]: - value = knob["value"] - if isinstance(value, six.text_type): - value = str(value) - if str(value).startswith("0x"): - value = int(value, 16) + try: + # write all knobs to node + for knob in nuke_imageio_writes["knobs"]: + value = knob["value"] + if isinstance(value, six.text_type): + value = str(value) + if str(value).startswith("0x"): + value = int(value, 16) - write_node[knob["name"]].setValue(value) + log.debug("knob: {}| value: {}".format( + knob["name"], value + )) + write_node[knob["name"]].setValue(value) + except TypeError: + log.warning( + "Legacy workflow didn't work, switching to current") + + set_node_knobs_from_settings( + write_node, nuke_imageio_writes["knobs"]) def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes @@ -2010,12 +2054,14 @@ # get imageio nuke_colorspace = get_nuke_imageio_settings() + log.info("Setting colorspace to workfile...") try: self.set_root_colorspace(nuke_colorspace["workfile"]) except AttributeError: msg = "set_colorspace(): missing `workfile` settings in template" nuke.message(msg) + log.info("Setting colorspace to viewers...") try: self.set_viewers_colorspace(nuke_colorspace["viewer"]) except AttributeError: @@ -2023,24 +2069,18 @@ nuke.message(msg) log.error(msg) + log.info("Setting colorspace to write nodes...") try: self.set_writes_colorspace() except AttributeError as _error: nuke.message(_error) log.error(_error) + log.info("Setting colorspace to read nodes...") read_clrs_inputs = nuke_colorspace["regexInputs"].get("inputs", []) if read_clrs_inputs: 
self.set_reads_colorspace(read_clrs_inputs) - try: - for key in nuke_colorspace: - log.debug("Preset's colorspace key: {}".format(key)) - except TypeError: - msg = "Nuke is not in templates! Contact your supervisor!" - nuke.message(msg) - log.error(msg) - def reset_frame_range_handles(self): """Set frame range to current asset""" @@ -2227,10 +2267,9 @@ def get_write_node_template_attr(node): subset=avalon_knob_data["subset"] ) + # collecting correct data - correct_data = OrderedDict({ - "file": get_render_path(node) - }) + correct_data = OrderedDict() # adding imageio knob presets for k, v in nuke_imageio_writes.items(): @@ -2639,20 +2678,16 @@ def add_scripts_gizmo(): class NukeDirmap(HostDirmap): - def __init__(self, host_name, project_settings, sync_module, file_name): + def __init__(self, file_name, *args, **kwargs): """ - Args: - host_name (str): Nuke - project_settings (dict): settings of current project - sync_module (SyncServerModule): to limit reinitialization - file_name (str): full path of referenced file from workfiles + Args: + file_name (str): full path of referenced file from workfiles + *args (tuple): Positional arguments for 'HostDirmap' class + **kwargs (dict): Keyword arguments for 'HostDirmap' class """ - self.host_name = host_name - self.project_settings = project_settings - self.file_name = file_name - self.sync_module = sync_module - self._mapping = None # cache mapping + self.file_name = file_name + super(NukeDirmap, self).__init__(*args, **kwargs) def on_enable_dirmap(self): pass @@ -2672,14 +2707,20 @@ class NukeDirmap(HostDirmap): class DirmapCache: """Caching class to get settings and sync_module easily and only once.""" + _project_name = None _project_settings = None _sync_module = None + @classmethod + def project_name(cls): + if cls._project_name is None: + cls._project_name = os.getenv("AVALON_PROJECT") + return cls._project_name + @classmethod def project_settings(cls): if cls._project_settings is None: - cls._project_settings = get_project_settings( - os.getenv("AVALON_PROJECT")) + cls._project_settings = get_project_settings(cls.project_name()) return cls._project_settings @classmethod @@ -2690,32 +2731,25 @@ class DirmapCache: @contextlib.contextmanager -def _duplicate_node_temp(): +def node_tempfile(): """Create a temp file where node is pasted during duplication. This is to avoid using clipboard for node duplication. """ - duplicate_node_temp_path = os.path.join( - tempfile.gettempdir(), - "openpype_nuke_duplicate_temp_{}".format(os.getpid()) + tmp_file = tempfile.NamedTemporaryFile( + mode="w", prefix="openpype_nuke_temp_", suffix=".nk", delete=False ) - - # This can happen only if 'duplicate_node' would be - if os.path.exists(duplicate_node_temp_path): - log.warning(( - "Temp file for node duplication already exists." 
- " Trying to remove {}" - ).format(duplicate_node_temp_path)) - os.remove(duplicate_node_temp_path) + tmp_file.close() + node_tempfile_path = tmp_file.name try: # Yield the path where node can be copied - yield duplicate_node_temp_path + yield node_tempfile_path finally: # Remove the file at the end - os.remove(duplicate_node_temp_path) + os.remove(node_tempfile_path) def duplicate_node(node): @@ -2724,7 +2758,7 @@ def duplicate_node(node): # select required node for duplication node.setSelected(True) - with _duplicate_node_temp() as filepath: + with node_tempfile() as filepath: # copy selected to temp filepath nuke.nodeCopy(filepath) @@ -2745,10 +2779,14 @@ def dirmap_file_name_filter(file_name): Checks project settings for potential mapping from source to dest. """ - dirmap_processor = NukeDirmap("nuke", - DirmapCache.project_settings(), - DirmapCache.sync_module(), - file_name) + + dirmap_processor = NukeDirmap( + file_name, + "nuke", + DirmapCache.project_name(), + DirmapCache.project_settings(), + DirmapCache.sync_module(), + ) dirmap_processor.process_dirmap() if os.path.exists(dirmap_processor.file_name): return dirmap_processor.file_name @@ -2795,3 +2833,100 @@ def ls_img_sequence(path): } return False + + +def get_group_io_nodes(nodes): + """Get the input and the output of a group of nodes.""" + + if not nodes: + raise ValueError("there is no nodes in the list") + + input_node = None + output_node = None + + if len(nodes) == 1: + input_node = output_node = nodes[0] + + else: + for node in nodes: + if "Input" in node.name(): + input_node = node + + if "Output" in node.name(): + output_node = node + + if input_node is not None and output_node is not None: + break + + if input_node is None: + raise ValueError("No Input found") + + if output_node is None: + raise ValueError("No Output found") + return input_node, output_node + + +def get_extreme_positions(nodes): + """Get the 4 numbers that represent the box of a group of nodes.""" + + if not nodes: + raise ValueError("there is no nodes in the list") + + nodes_xpos = [n.xpos() for n in nodes] + \ + [n.xpos() + n.screenWidth() for n in nodes] + + nodes_ypos = [n.ypos() for n in nodes] + \ + [n.ypos() + n.screenHeight() for n in nodes] + + min_x, min_y = (min(nodes_xpos), min(nodes_ypos)) + max_x, max_y = (max(nodes_xpos), max(nodes_ypos)) + return min_x, min_y, max_x, max_y + + +def refresh_node(node): + """Correct a bug caused by the multi-threading of nuke. + + Refresh the node to make sure that it takes the desired attributes. + """ + + x = node.xpos() + y = node.ypos() + nuke.autoplaceSnap(node) + node.setXYpos(x, y) + + +def refresh_nodes(nodes): + for node in nodes: + refresh_node(node) + + +def get_names_from_nodes(nodes): + """Get list of nodes names. + + Args: + nodes(List[nuke.Node]): List of nodes to convert into names. + + Returns: + List[str]: Name of passed nodes. + """ + + return [ + node.name() + for node in nodes + ] + + +def get_nodes_by_names(names): + """Get list of nuke nodes based on their names. + + Args: + names (List[str]): List of node names to be found. + + Returns: + List[nuke.Node]: List of nodes found by name. 
+ """ + + return [ + nuke.toNode(name) + for name in names + ] diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c1cd8f771a..c343c635fa 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -7,11 +7,8 @@ import nuke import pyblish.api import openpype -from openpype.api import ( - Logger, - get_current_project_settings -) -from openpype.lib import register_event_callback +from openpype.settings import get_current_project_settings +from openpype.lib import register_event_callback, Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -26,6 +23,8 @@ from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop from .lib import ( + Context, + get_main_window, add_publish_knob, WorkfileSettings, process_workfile_builder, @@ -33,7 +32,13 @@ from .lib import ( check_inventory_versions, set_avalon_knob_data, read_avalon_data, - Context +) +from .workfile_template_builder import ( + NukePlaceholderLoadPlugin, + build_workfile_template, + update_workfile_template, + create_placeholder, + update_placeholder, ) log = Logger.get_logger(__name__) @@ -53,23 +58,6 @@ if os.getenv("PYBLISH_GUI", None): pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) -def get_main_window(): - """Acquire Nuke's main window""" - if Context.main_window is None: - from Qt import QtWidgets - - top_widgets = QtWidgets.QApplication.topLevelWidgets() - name = "Foundry::UI::DockMainWindow" - for widget in top_widgets: - if ( - widget.inherits("QMainWindow") - and widget.metaObject().className() == name - ): - Context.main_window = widget - break - return Context.main_window - - def reload_config(): """Attempt to reload pipeline at run-time. 
@@ -78,7 +66,6 @@ def reload_config(): """ for module in ( - "openpype.api", "openpype.hosts.nuke.api.actions", "openpype.hosts.nuke.api.menu", "openpype.hosts.nuke.api.plugin", @@ -150,6 +137,12 @@ def _show_workfiles(): host_tools.show_workfiles(parent=None, on_top=False) +def get_workfile_build_placeholder_plugins(): + return [ + NukePlaceholderLoadPlugin + ] + + def _install_menu(): # uninstall original avalon menu main_window = get_main_window() @@ -219,6 +212,24 @@ def _install_menu(): lambda: BuildWorkfile().process() ) + menu_template = menu.addMenu("Template Builder") # creating template menu + menu_template.addCommand( + "Build Workfile from template", + lambda: build_workfile_template() + ) + menu_template.addCommand( + "Update Workfile", + lambda: update_workfile_template() + ) + menu_template.addSeparator() + menu_template.addCommand( + "Create Place Holder", + lambda: create_placeholder() + ) + menu_template.addCommand( + "Update Place Holder", + lambda: update_placeholder() + ) menu.addSeparator() menu.addCommand( "Experimental tools...", diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 37ce03dc55..91bb90ff99 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -6,7 +6,7 @@ from abc import abstractmethod import nuke -from openpype.api import get_current_project_settings +from openpype.settings import get_current_project_settings from openpype.pipeline import ( LegacyCreator, LoaderPlugin, diff --git a/openpype/hosts/nuke/api/utils.py b/openpype/hosts/nuke/api/utils.py index 5b0c607292..6bcb752dd1 100644 --- a/openpype/hosts/nuke/api/utils.py +++ b/openpype/hosts/nuke/api/utils.py @@ -1,7 +1,7 @@ import os import nuke -from openpype.api import resources +from openpype import resources from .lib import maintained_selection diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py new file mode 100644 index 0000000000..7a2e442e32 --- /dev/null +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -0,0 +1,578 @@ +import collections + +import nuke + +from openpype.pipeline import registered_host +from openpype.pipeline.workfile.workfile_template_builder import ( + AbstractTemplateBuilder, + PlaceholderPlugin, + LoadPlaceholderItem, + PlaceholderLoadMixin, +) +from openpype.tools.workfile_template_build import ( + WorkfileBuildPlaceholderDialog, +) + +from .lib import ( + find_free_space_to_paste_nodes, + get_extreme_positions, + get_group_io_nodes, + imprint, + refresh_node, + refresh_nodes, + reset_selection, + get_names_from_nodes, + get_nodes_by_names, + select_nodes, + duplicate_node, + node_tempfile, +) + +PLACEHOLDER_SET = "PLACEHOLDERS_SET" + + +class NukeTemplateBuilder(AbstractTemplateBuilder): + """Concrete implementation of AbstractTemplateBuilder for Nuke""" + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded.
+ + Args: + path (str): A path to current template (usually given by + get_template_path implementation) + + Returns: + bool: Whether the template was successfully imported or not + """ + + # TODO check if the template is already imported + + nuke.nodePaste(path) + reset_selection() + + return True + + +class NukePlaceholderPlugin(PlaceholderPlugin): + node_color = 4278190335 + + def _collect_scene_placeholders(self): + # Cache placeholder data to shared data + placeholder_nodes = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if placeholder_nodes is None: + placeholder_nodes = {} + all_groups = collections.deque() + all_groups.append(nuke.thisGroup()) + while all_groups: + group = all_groups.popleft() + for node in group.nodes(): + if isinstance(node, nuke.Group): + all_groups.append(node) + + node_knobs = node.knobs() + if ( + "builder_type" not in node_knobs + or "is_placeholder" not in node_knobs + or not node.knob("is_placeholder").value() + ): + continue + + if "empty" in node_knobs and node.knob("empty").value(): + continue + + placeholder_nodes[node.fullName()] = node + + self.builder.set_shared_populate_data( + "placeholder_nodes", placeholder_nodes + ) + return placeholder_nodes + + def create_placeholder(self, placeholder_data): + placeholder_data["plugin_identifier"] = self.identifier + + placeholder = nuke.nodes.NoOp() + placeholder.setName("PLACEHOLDER") + placeholder.knob("tile_color").setValue(self.node_color) + + imprint(placeholder, placeholder_data) + imprint(placeholder, {"is_placeholder": True}) + placeholder.knob("is_placeholder").setVisible(False) + + def update_placeholder(self, placeholder_item, placeholder_data): + node = nuke.toNode(placeholder_item.scene_identifier) + imprint(node, placeholder_data) + + def _parse_placeholder_node_data(self, node): + placeholder_data = {} + for key in self.get_placeholder_keys(): + knob = node.knob(key) + value = None + if knob is not None: + value = knob.getValue() + placeholder_data[key] = value + return placeholder_data + + +class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): + identifier = "nuke.load" + label = "Nuke load" + + def _parse_placeholder_node_data(self, node): + placeholder_data = super( + NukePlaceholderLoadPlugin, self + )._parse_placeholder_node_data(node) + + node_knobs = node.knobs() + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + placeholder_data["nb_children"] = nb_children + + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + placeholder_data["siblings"] = siblings + + node_full_name = node.fullName() + placeholder_data["group_name"] = node_full_name.rpartition(".")[0] + placeholder_data["last_loaded"] = [] + placeholder_data["delete"] = False + return placeholder_data + + def _get_loaded_repre_ids(self): + loaded_representation_ids = self.builder.get_shared_populate_data( + "loaded_representation_ids" + ) + if loaded_representation_ids is None: + loaded_representation_ids = set() + for node in nuke.allNodes(): + if "repre_id" in node.knobs(): + loaded_representation_ids.add( + node.knob("repre_id").getValue() + ) + + self.builder.set_shared_populate_data( + "loaded_representation_ids", loaded_representation_ids + ) + return loaded_representation_ids + + def _before_repre_load(self, placeholder, representation): + placeholder.data["nodes_init"] = nuke.allNodes() + placeholder.data["last_repre_id"] = str(representation["_id"]) + + def 
collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, node in scene_placeholders.items(): + plugin_identifier_knob = node.knob("plugin_identifier") + if ( + plugin_identifier_knob is None + or plugin_identifier_knob.getValue() != self.identifier + ): + continue + + placeholder_data = self._parse_placeholder_node_data(node) + # TODO do data validations and maybe upgrades if they are invalid + output.append( + LoadPlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_load_placeholder(placeholder) + + def repopulate_placeholder(self, placeholder): + repre_ids = self._get_loaded_repre_ids() + self.populate_load_placeholder(placeholder, repre_ids) + + def get_placeholder_options(self, options=None): + return self.get_load_plugin_options(options) + + def cleanup_placeholder(self, placeholder, failed): + # deselect all selected nodes + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + # getting the latest nodes added + # TODO get from shared populate data! + nodes_init = placeholder.data["nodes_init"] + nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) + self.log.debug("Loaded nodes: {}".format(nodes_loaded)) + if not nodes_loaded: + return + + placeholder.data["delete"] = True + + nodes_loaded = self._move_to_placeholder_group( + placeholder, nodes_loaded + ) + placeholder.data["last_loaded"] = nodes_loaded + refresh_nodes(nodes_loaded) + + # positioning of the loaded nodes + min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) + for node in nodes_loaded: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) + refresh_nodes(nodes_loaded) + + # fix the problem of z_order for backdrops + self._fix_z_order(placeholder) + self._imprint_siblings(placeholder) + + if placeholder.data["nb_children"] == 0: + # save initial node positions and dimensions, update them + # and set inputs and outputs of loaded nodes + + self._imprint_inits() + self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded) + self._set_loaded_connections(placeholder) + + elif placeholder.data["siblings"]: + # create copies of placeholder siblings for the new loaded nodes, + # set their inputs and outputs and update all node positions, + # dimensions and sibling names + + siblings = get_nodes_by_names(placeholder.data["siblings"]) + refresh_nodes(siblings) + copies = self._create_sib_copies(placeholder) + new_nodes = list(copies.values()) # copied nodes + self._update_nodes(placeholder, new_nodes, nodes_loaded) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(placeholder_node, {"siblings": new_nodes_name}) + self._set_copies_connections(placeholder, copies) + + self._update_nodes( + placeholder, + nuke.allNodes(), + new_nodes + nodes_loaded, + 20 + ) + + new_siblings = get_names_from_nodes(new_nodes) + placeholder.data["siblings"] = new_siblings + + else: + # if the placeholder doesn't have siblings, the loaded + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_loaded, direction="bottom", offset=200 + ) + node = nuke.createNode("NoOp") + reset_selection() + nuke.delete(node) + for node in nodes_loaded: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) + + placeholder.data["nb_children"] += 1 + 
reset_selection() + # go back to root group + nuke.root().begin() + + def _move_to_placeholder_group(self, placeholder, nodes_loaded): + """ + Open the placeholder's group and copy loaded nodes into it. + + Returns: + nodes_loaded (list): the new list of pasted nodes + """ + + groups_name = placeholder.data["group_name"] + reset_selection() + select_nodes(nodes_loaded) + if groups_name: + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_loaded = nuke.selectedNodes() + return nodes_loaded + + def _fix_z_order(self, placeholder): + """Fix the problem of z_order when a backdrop is loaded.""" + + nodes_loaded = placeholder.data["last_loaded"] + loaded_backdrops = [] + bd_orders = set() + for node in nodes_loaded: + if isinstance(node, nuke.BackdropNode): + loaded_backdrops.append(node) + bd_orders.add(node.knob("z_order").getValue()) + + if not bd_orders: + return + + sib_orders = set() + for node_name in placeholder.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in loaded_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def _imprint_siblings(self, placeholder): + """ + - add sibling names to placeholder attributes (nodes loaded with it) + - add the representation id to the attributes of all the other nodes + """ + + loaded_nodes = placeholder.data["last_loaded"] + loaded_nodes_set = set(loaded_nodes) + data = {"repre_id": str(placeholder.data["last_repre_id"])} + + for node in loaded_nodes: + node_knobs = node.knobs() + if "builder_type" not in node_knobs: + # save the id of representation for all imported nodes + imprint(node, data) + node.knob("repre_id").setVisible(False) + refresh_node(node) + continue + + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(loaded_nodes_set - {node}) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(node, siblings) + + def _imprint_inits(self): + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) + + def _update_nodes( + self, placeholder, nodes, considered_nodes, offset_y=None + ): + """Adjust backdrop nodes dimensions and positions. + + Considering the sizes of the given nodes.
+ + Args: + nodes (list): list of nodes to update + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset_y (int): vertical distance between copies + """ + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() + else: + siblings = get_nodes_by_names(placeholder.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y <= 0 and diff_x <= 0: + return + + for node in nodes: + refresh_node(node) + + if ( + node == placeholder_node + or node in considered_nodes + ): + continue + + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) + + if node.ypos() >= min_y: + node.setYpos(node.ypos() + diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) + + def _set_loaded_connections(self, placeholder): + """Set inputs and outputs of loaded nodes.""" + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + input_node, output_node = get_group_io_nodes( + placeholder.data["last_loaded"] + ) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node: + node.setInput(idx, output_node) + + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node: + input_node.setInput(0, node) + + def _create_sib_copies(self, placeholder): + """Create copies of the placeholder siblings (the ones that were + loaded with it) for the new nodes added. + + Returns: + copies (dict): mapping of copied node names to their copies + """ + + copies = {} + siblings = get_nodes_by_names(placeholder.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) + + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node + return copies + + def _set_copies_connections(self, placeholder, copies): + """Set inputs and outputs of the copies. + + Args: + copies (dict): Copied nodes by their names.
+ """ + + last_input, last_output = get_group_io_nodes( + placeholder.data["last_loaded"] + ) + siblings = get_nodes_by_names(placeholder.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] + + for node_init in siblings: + if node_init == siblings_output: + continue + + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue + + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) + + +def build_workfile_template(*args): + builder = NukeTemplateBuilder(registered_host()) + builder.build_template() + + +def update_workfile_template(*args): + builder = NukeTemplateBuilder(registered_host()) + builder.rebuild_template() + + +def create_placeholder(*args): + host = registered_host() + builder = NukeTemplateBuilder(host) + window = WorkfileBuildPlaceholderDialog(host, builder) + window.exec_() + + +def update_placeholder(*args): + host = registered_host() + builder = NukeTemplateBuilder(host) + placeholder_items_by_id = { + placeholder_item.scene_identifier: placeholder_item + for placeholder_item in builder.get_placeholders() + } + placeholder_items = [] + for node in nuke.selectedNodes(): + node_name = node.fullName() + if node_name in placeholder_items_by_id: + placeholder_items.append(placeholder_items_by_id[node_name]) + + # TODO show UI at least + if len(placeholder_items) == 0: + raise ValueError("No node selected") + + if len(placeholder_items) > 1: + raise ValueError("Too many selected nodes") + + placeholder_item = placeholder_items[0] + window = WorkfileBuildPlaceholderDialog(host, builder) + window.set_update_mode(placeholder_item) + window.exec_() diff --git a/openpype/hosts/nuke/api/workio.py b/openpype/hosts/nuke/api/workio.py index 68fcb0927f..65b86bf01b 100644 --- a/openpype/hosts/nuke/api/workio.py +++ b/openpype/hosts/nuke/api/workio.py @@ -2,11 +2,9 @@ import os import nuke -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["nuke"] + return [".nk"] def has_unsaved_changes(): diff --git a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py index c04c939a8d..764499ff0c 100644 --- a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py +++ b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py @@ -1,4 +1,4 @@ -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import InventoryAction from openpype.hosts.nuke.api.lib import set_avalon_knob_data diff --git a/openpype/hosts/nuke/plugins/load/actions.py b/openpype/hosts/nuke/plugins/load/actions.py index d364a4f3a1..69f56c7305 100644 --- a/openpype/hosts/nuke/plugins/load/actions.py +++ b/openpype/hosts/nuke/plugins/load/actions.py @@ -2,10 +2,10 @@ """ -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import load -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) 
class SetFrameRangeLoader(load.LoaderPlugin): diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 346773b5af..654ea367c8 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -425,7 +425,7 @@ class LoadClip(plugin.NukeLoader): colorspace = repre_data.get("colorspace") colorspace = colorspace or version_data.get("colorspace") - # colorspace from `project_anatomy/imageio/nuke/regexInputs` + # colorspace from `project_settings/nuke/imageio/regexInputs` iio_colorspace = get_imageio_input_colorspace(path) # Set colorspace defined in version data diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py index 0a2df0898e..d1e5c4cc5a 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py @@ -4,7 +4,7 @@ import nuke import pyblish.api -import openpype +from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import ( maintained_selection, reset_selection, @@ -12,7 +12,7 @@ from openpype.hosts.nuke.api.lib import ( ) -class ExtractBackdropNode(openpype.api.Extractor): +class ExtractBackdropNode(publish.Extractor): """Extracting content of backdrop nodes Will create nuke script only with containing nodes. diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py index 54f65a0be3..b751bfab03 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_camera.py +++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py @@ -5,11 +5,12 @@ from pprint import pformat import nuke import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import maintained_selection -class ExtractCamera(openpype.api.Extractor): +class ExtractCamera(publish.Extractor): """ 3D camera extractor """ label = 'Extract Camera' diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py index 2d5bfdeb5e..3047ad6724 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -3,7 +3,7 @@ import nuke import pyblish.api -import openpype +from openpype.pipeline import publish from openpype.hosts.nuke.api import utils as pnutils from openpype.hosts.nuke.api.lib import ( maintained_selection, @@ -12,7 +12,7 @@ from openpype.hosts.nuke.api.lib import ( ) -class ExtractGizmo(openpype.api.Extractor): +class ExtractGizmo(publish.Extractor): """Extracting Gizmo (Group) node Will create nuke script only with the Gizmo node.
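This file and the extractor files below repeat one mechanical migration: openpype.api.Extractor becomes openpype.pipeline.publish.Extractor. A minimal sketch of a plugin written against the new import path (the family name is made up, and it assumes the base class keeps its staging_dir() helper):

    import pyblish.api
    from openpype.pipeline import publish


    class ExtractExample(publish.Extractor):
        """Hypothetical extractor using the new base class location."""

        label = "Extract Example"
        hosts = ["nuke"]
        families = ["example"]

        def process(self, instance):
            # staging_dir() is assumed to behave as it did before the move
            staging_dir = self.staging_dir(instance)
            self.log.info("Staging to {}".format(staging_dir))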
diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index 0375263338..d82cb3110b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -2,14 +2,15 @@ import os from pprint import pformat import nuke import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import ( maintained_selection, select_nodes ) -class ExtractModel(openpype.api.Extractor): +class ExtractModel(publish.Extractor): """ 3D model extractor """ label = 'Extract Model' diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 8879f0c999..843d588786 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -1,11 +1,13 @@ -import pyblish.api -import nuke import os - -import openpype + +import pyblish.api import clique +import nuke + +from openpype.pipeline import publish -class NukeRenderLocal(openpype.api.Extractor): +class NukeRenderLocal(publish.Extractor): # TODO: rewrite docstring to nuke """Render the current Nuke composition locally. diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data.py b/openpype/hosts/nuke/plugins/publish/extract_review_data.py index 38a8140cff..3c85b21b08 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data.py @@ -1,10 +1,11 @@ import os -import pyblish.api -import openpype from pprint import pformat +import pyblish.api + +from openpype.pipeline import publish -class ExtractReviewData(openpype.api.Extractor): +class ExtractReviewData(publish.Extractor): """Extracts review tag into available representation """ diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py index 4cf2fd7d9f..67779e9599 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype + +from openpype.pipeline import publish from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import maintained_selection -class ExtractReviewDataLut(openpype.api.Extractor): +class ExtractReviewDataLut(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index fc16e189fb..3fcfc2a4b5 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -1,13 +1,14 @@ import os -from pprint import pformat import re +from pprint import pformat import pyblish.api -import openpype + +from openpype.pipeline import publish from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import maintained_selection -class ExtractReviewDataMov(openpype.api.Extractor): +class ExtractReviewDataMov(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index b5cad143db..e7197b4fa8
100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -6,7 +6,7 @@ import copy import pyblish.api import six -import openpype +from openpype.pipeline import publish from openpype.hosts.nuke.api import ( maintained_selection, duplicate_node, @@ -14,7 +14,7 @@ from openpype.hosts.nuke.api import ( ) -class ExtractSlateFrame(openpype.api.Extractor): +class ExtractSlateFrame(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index 2a919051d2..19eae9638b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -2,7 +2,8 @@ import sys import os import nuke import pyblish.api -import openpype + +from openpype.pipeline import publish from openpype.hosts.nuke.api import ( maintained_selection, get_view_process_node @@ -13,7 +14,7 @@ if sys.version_info[0] >= 3: unicode = str -class ExtractThumbnail(openpype.api.Extractor): +class ExtractThumbnail(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml new file mode 100644 index 0000000000..1097909a5f --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml @@ -0,0 +1,18 @@ + + + + Shot/Asset name + +## Invalid Shot/Asset name in subset + +The node named `{node_name}` +is in the context of `{correct_name}`, but its _asset_ knob is set to `{wrong_name}`. + +### How to repair? + +1. Either use the Repair or the Select button. +2. If you chose Select, rename the asset knob to the correct name. +3. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml new file mode 100644 index 0000000000..ab1b650773 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml @@ -0,0 +1,36 @@ + + + + Found multiple outputs + +## Invalid output amount + +The backdrop has more than one outgoing connection. + +### How to repair? + +1. Use button `Center node in node graph` and navigate to the backdrop. +2. Reorganize the nodes so that only one outgoing connection is present. +3. Hit reload button on the publisher. + + +### How could this happen? + +More than one node above the backdrop is linked downstream, or a node above the backdrop has multiple output connections linked downstream. + + + + Empty backdrop + +## Invalid empty backdrop + +Backdrop is empty and no nodes are found above it. + +### How to repair? + +1. Use button `Center node in node graph` and navigate to the backdrop. +2. Add any node above it or delete it. +3. Hit reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml new file mode 100644 index 0000000000..f39a41a4f9 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml @@ -0,0 +1,36 @@ + + + + Found multiple outputs + +## Invalid amount of Output nodes + +Group node `{node_name}` has more than one Output node. + +### How to repair?
+ +1. Use button `Open Group`. +2. Remove redundant Output node. +3. Hit reload button on the publisher. + + +### How could this happen? + +Perhaps you accidentally created more than one Output node. + + + + Missing Input nodes + +## Missing Input nodes + +Make sure there is at least one connected Input node inside the group node with name `{node_name}`. + +### How to repair? + +1. Use button `Open Group`. +2. Add at least one Input node and connect it to other nodes. +3. Hit reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml new file mode 100644 index 0000000000..76c184f653 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml @@ -0,0 +1,18 @@ + + + + Knobs value + +## Invalid node's knobs values + +The following node knobs need to be repaired: + +{invalid_items} + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml new file mode 100644 index 0000000000..08a88a993e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml @@ -0,0 +1,16 @@ + + + + Output format + +## Invalid format setting + +Either the Reformat node inside the render group is missing, or the Reformat node's output format knob is not set to `root.format`. + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml new file mode 100644 index 0000000000..6fe5d5d43e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml @@ -0,0 +1,16 @@ + + + + Proxy mode + +## Invalid proxy mode value + +Nuke is set to use Proxy. This is not supported by the publisher. + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml new file mode 100644 index 0000000000..434081c269 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml @@ -0,0 +1,17 @@ + + + + Rendered Frames + +## Missing Rendered Frames + +Render node "{node_name}" is set to "Use existing frames", but frames are missing. + +### How to repair? + +1. Use Repair button. +2. Set a different target. +3. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml new file mode 100644 index 0000000000..871fc629ce --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml @@ -0,0 +1,18 @@ + + + + Script attributes + +## Invalid Script attributes + +The following script root attributes need to be fixed: + +{failed_attributes} + +### How to repair? + +1. Use Repair. +2. Hit Reload button on the publisher.
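Each of these new XML files backs one validator below: the plugin raises PublishXmlValidationError and the {placeholders} in the XML body are filled from formatting_data. A minimal sketch of the pattern, pieced together from the call sites in this patch (the optional third positional argument, e.g. "multiple_outputs", selects a specific error entry; node and node_is_valid are hypothetical):

    from openpype.pipeline import PublishXmlValidationError

    # inside some validator's process():
    if not node_is_valid(node):  # hypothetical check
        raise PublishXmlValidationError(
            self,  # the plugin instance
            "Node '{}' failed validation".format(node.name()),
            formatting_data={"node_name": node.name()},
        )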
+ + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml new file mode 100644 index 0000000000..cdf85102bc --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -0,0 +1,18 @@ + + + + Knobs values + +## Invalid node's knobs values + +The following write node knobs need to be repaired: + +{xml_msg} + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py index 7349a8f424..316c651b66 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py @@ -3,11 +3,13 @@ import os import nuke import pyblish.api -import openpype.api as pype + +from openpype.lib import get_version_from_path from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data ) +from openpype.pipeline import KnownPublishError class CollectWorkfile(pyblish.api.ContextPlugin): @@ -22,6 +24,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): current_file = os.path.normpath(nuke.root().name()) + if current_file.lower() == "root": + raise KnownPublishError( + "Workfile does not have a correct file name.\n" + "Use the Workfiles tool to manage the name correctly." + ) + knob_data = get_avalon_knob_data(root) add_publish_knob(root) @@ -67,7 +75,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "fps": root['fps'].value(), "currentFile": current_file, - "version": int(pype.get_version_from_path(current_file)), + "version": int(get_version_from_path(current_file)), "host": pyblish.api.current_host(), "hostVersion": nuke.NUKE_VERSION_STRING diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index e37cc8a80a..17c4bc30cf 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -201,34 +201,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): if not instance.data["review"]: instance.data["useSequenceForReview"] = False - project_name = legacy_io.active_project() - asset_name = instance.data["asset"] - # * Add audio to instance if exists.
- # Find latest versions document - last_version_doc = get_last_version_by_subset_name( - project_name, "audioMain", asset_name=asset_name, fields=["_id"] - ) - - repre_doc = None - if last_version_doc: - # Try to find it's representation (Expected there is only one) - repre_docs = list(get_representations( - project_name, version_ids=[last_version_doc["_id"]] - )) - if not repre_docs: - self.log.warning( - "Version document does not contain any representations" - ) - else: - repre_doc = repre_docs[0] - - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.debug("instance.data: {}".format(pformat(instance.data))) def is_prerender(self, families): diff --git a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py similarity index 72% rename from openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py rename to openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 842f74b6f6..52731140ff 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -3,20 +3,20 @@ from __future__ import absolute_import import nuke - import pyblish.api -import openpype.api -from openpype.hosts.nuke.api.lib import ( - recreate_instance, - reset_selection, - select_nodes + +import openpype.hosts.nuke.api.lib as nlib +import openpype.hosts.nuke.api as nuke_api +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, ) class SelectInvalidInstances(pyblish.api.Action): """Select invalid instances in Outliner.""" - label = "Select Instances" + label = "Select" icon = "briefcase" on = "failed" @@ -39,6 +39,7 @@ class SelectInvalidInstances(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) if instances: + self.deselect() self.log.info( "Selecting invalid nodes: %s" % ", ".join( [str(x) for x in instances] @@ -50,12 +51,12 @@ class SelectInvalidInstances(pyblish.api.Action): self.deselect() def select(self, instances): - select_nodes( + nlib.select_nodes( [nuke.toNode(str(x)) for x in instances] ) def deselect(self): - reset_selection() + nlib.reset_selection() class RepairSelectInvalidInstances(pyblish.api.Action): @@ -85,12 +86,12 @@ class RepairSelectInvalidInstances(pyblish.api.Action): context_asset = context.data["assetEntity"]["name"] for instance in instances: origin_node = instance[0] - recreate_instance( + nuke_api.lib.recreate_instance( origin_node, avalon_data={"asset": context_asset} ) -class ValidateInstanceInContext(pyblish.api.InstancePlugin): +class ValidateCorrectAssetName(pyblish.api.InstancePlugin): """Validator to check if instance asset match context asset. When working in per-shot style you always publish data in context of @@ -99,15 +100,31 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin): Action on this validator will select invalid instances in Outliner. 
""" - - order = openpype.api.ValidateContentsOrder - label = "Instance in same Context" + order = ValidateContentsOrder + label = "Validate correct asset name" hosts = ["nuke"] - actions = [SelectInvalidInstances, RepairSelectInvalidInstances] + actions = [ + SelectInvalidInstances, + RepairSelectInvalidInstances + ] optional = True def process(self, instance): asset = instance.data.get("asset") context_asset = instance.context.data["assetEntity"]["name"] - msg = "{} has asset {}".format(instance.name, asset) - assert asset == context_asset, msg + + msg = ( + "Instance `{}` has wrong shot/asset name:\n" + "Correct: `{}` | Wrong: `{}`").format( + instance.name, asset, context_asset) + + self.log.debug(msg) + + if asset != context_asset: + raise PublishXmlValidationError( + self, msg, formatting_data={ + "node_name": instance[0]["name"].value(), + "wrong_name": asset, + "correct_name": context_asset + } + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index e2843d146e..17dc79dc56 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -1,6 +1,7 @@ import nuke import pyblish from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.pipeline import PublishXmlValidationError class SelectCenterInNodeGraph(pyblish.api.Action): @@ -47,8 +48,9 @@ class SelectCenterInNodeGraph(pyblish.api.Action): @pyblish.api.log class ValidateBackdrop(pyblish.api.InstancePlugin): - """Validate amount of nodes on backdrop node in case user - forgotten to add nodes above the publishing backdrop node""" + """ Validate amount of nodes on backdrop node in case user + forgoten to add nodes above the publishing backdrop node. 
+ """ order = pyblish.api.ValidatorOrder optional = True @@ -63,8 +65,25 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): msg_multiple_outputs = ( "Only one outcoming connection from " "\"{}\" is allowed").format(instance.data["name"]) - assert len(connections_out.keys()) <= 1, msg_multiple_outputs - msg_no_content = "No content on backdrop node: \"{}\"".format( + if len(connections_out.keys()) > 1: + raise PublishXmlValidationError( + self, + msg_multiple_outputs, + "multiple_outputs" + ) + + msg_no_nodes = "No content on backdrop node: \"{}\"".format( instance.data["name"]) - assert len(instance) > 1, msg_no_content + + self.log.debug( + "Amount of nodes on instance: {}".format( + len(instance)) + ) + + if len(instance) == 1: + raise PublishXmlValidationError( + self, + msg_no_nodes, + "no_nodes" + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index d0d930f50c..2321bd1fd4 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -1,6 +1,7 @@ -import nuke import pyblish -from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.nuke.api import maintained_selection +import nuke class OpenFailedGroupNode(pyblish.api.Action): @@ -8,7 +9,7 @@ class OpenFailedGroupNode(pyblish.api.Action): Centering failed instance node in node grap """ - label = "Open Gizmo in Node Graph" + label = "Open Group" icon = "wrench" on = "failed" @@ -48,11 +49,23 @@ class ValidateGizmo(pyblish.api.InstancePlugin): with grpn: connections_out = nuke.allNodes('Output') - msg_multiple_outputs = "Only one outcoming connection from " - "\"{}\" is allowed".format(instance.data["name"]) - assert len(connections_out) <= 1, msg_multiple_outputs + msg_multiple_outputs = ( + "Only one outcoming connection from " + "\"{}\" is allowed").format(instance.data["name"]) + + if len(connections_out) > 1: + raise PublishXmlValidationError( + self, msg_multiple_outputs, "multiple_outputs", + {"node_name": grpn["name"].value()} + ) connections_in = nuke.allNodes('Input') - msg_missing_inputs = "At least one Input node has to be used in: " - "\"{}\"".format(instance.data["name"]) - assert len(connections_in) >= 1, msg_missing_inputs + msg_missing_inputs = ( + "At least one Input node has to be inside Group: " + "\"{}\"").format(instance.data["name"]) + + if len(connections_in) == 0: + raise PublishXmlValidationError( + self, msg_missing_inputs, "no_inputs", + {"node_name": grpn["name"].value()} + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index d290ff4541..d44f27791a 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -1,7 +1,11 @@ import nuke - +import six import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ( + RepairContextAction, + PublishXmlValidationError, +) class ValidateKnobs(pyblish.api.ContextPlugin): @@ -23,15 +27,25 @@ class ValidateKnobs(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder label = "Validate Knobs" hosts = ["nuke"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] optional = True def process(self, context): - invalid = self.get_invalid(context, compute=True) if invalid: - raise RuntimeError( - "Found knobs with invalid values:\n{}".format(invalid) + 
invalid_items = [ + ( + "Node __{node_name}__ with knob _{label}_ " + "expecting _{expected}_, " + "but is set to _{current}_" + ).format(**i) + for i in invalid + ] + raise PublishXmlValidationError( + self, + "Found knobs with invalid values:\n{}".format(invalid), + formatting_data={ + "invalid_items": "\n".join(invalid_items)} ) @classmethod @@ -54,15 +68,24 @@ class ValidateKnobs(pyblish.api.ContextPlugin): # Filter families. families = [instance.data["family"]] families += instance.data.get("families", []) - families = list(set(families) & set(cls.knobs.keys())) + if not families: continue # Get all knobs to validate. knobs = {} for family in families: + # check if dot in family + if "." in family: + family = family.split(".")[0] + + # avoid families not in settings + if family not in cls.knobs: + continue + + # get presets of knobs for preset in cls.knobs[family]: - knobs.update({preset: cls.knobs[family][preset]}) + knobs[preset] = cls.knobs[family][preset] # Get invalid knobs. nodes = [] @@ -71,8 +94,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): nodes.append(node) if node.Class() == "Group": node.begin() - for i in nuke.allNodes(): - nodes.append(i) + nodes.extend(iter(nuke.allNodes())) node.end() for node in nodes: @@ -84,6 +106,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): if node[knob].value() != expected: invalid_knobs.append( { + "node_name": node.name(), "knob": node[knob], "name": node[knob].name(), "label": node[knob].label(), @@ -99,7 +122,9 @@ class ValidateKnobs(pyblish.api.ContextPlugin): def repair(cls, instance): invalid = cls.get_invalid(instance) for data in invalid: - if isinstance(data["expected"], unicode): + # TODO: will need to improve type definitions + # with the new settings for knob types + if isinstance(data["expected"], six.text_type): data["knob"].setValue(str(data["expected"])) continue diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index 27094b8d74..1e59880f90 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,43 +1,9 @@ -import nuke - import pyblish.api - -class RepairWriteResolutionDifference(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - reformat = instance[0].dependencies()[0] - if reformat.Class() != "Reformat": - reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)]) - - xpos = instance[0].xpos() - ypos = instance[0].ypos() - 26 - - dependent_ypos = instance[0].dependencies()[0].ypos() - if (instance[0].ypos() - dependent_ypos) <= 51: - xpos += 110 - - reformat.setXYpos(xpos, ypos) - - instance[0].setInput(0, reformat) - - reformat["resize"].setValue("none") +from openpype.hosts.nuke.api import maintained_selection +from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import RepairAction +import nuke class ValidateOutputResolution(pyblish.api.InstancePlugin): @@ -52,27 +18,75 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): 
families = ["render", "render.local", "render.farm"] label = "Write Resolution" hosts = ["nuke"] - actions = [RepairWriteResolutionDifference] + actions = [RepairAction] + + missing_msg = "Missing Reformat node in render group node" + resolution_msg = "Reformat is set to wrong format" def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise PublishXmlValidationError(self, invalid) - # Skip bounding box check if a reformat node exists. - if instance[0].dependencies()[0].Class() == "Reformat": - return + @classmethod + def get_reformat(cls, instance): + reformat = None + for inode in instance: + if inode.Class() != "Reformat": + continue + reformat = inode - msg = "Bounding box is outside the format." - assert self.check_resolution(instance), msg + return reformat - def check_resolution(self, instance): - node = instance[0] + @classmethod + def get_invalid(cls, instance): + def _check_resolution(instance, reformat): + root_width = instance.data["resolutionWidth"] + root_height = instance.data["resolutionHeight"] - root_width = instance.data["resolutionWidth"] - root_height = instance.data["resolutionHeight"] + write_width = reformat.format().width() + write_height = reformat.format().height() - write_width = node.format().width() - write_height = node.format().height() + if (root_width != write_width) or (root_height != write_height): + return None + else: + return True - if (root_width != write_width) or (root_height != write_height): - return None - else: - return True + # check if reformat is in render node + reformat = cls.get_reformat(instance) + if not reformat: + return cls.missing_msg + + # check if reformat is set to correct root format + correct_format = _check_resolution(instance, reformat) + if not correct_format: + return cls.resolution_msg + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + grp_node = instance[0] + + if cls.missing_msg == invalid: + # make sure we are inside of the group node + with grp_node: + # find input node and select it + _input = None + for inode in instance: + if inode.Class() != "Input": + continue + _input = inode + + # add reformat node under it + with maintained_selection(): + _input['selected'].setValue(True) + _rfn = nuke.createNode("Reformat", "name Reformat01") + _rfn["resize"].setValue(0) + _rfn["black_outside"].setValue(1) + + cls.log.info("I am adding reformat node") + + if cls.resolution_msg == invalid: + reformat = cls.get_reformat(instance) + reformat["format"].setValue(nuke.root()["format"].value()) + cls.log.info("I am fixing reformat to root.format") diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py index 9c6ca03ffd..dac240ad19 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py +++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py @@ -1,5 +1,6 @@ import pyblish import nuke +from openpype.pipeline import PublishXmlValidationError class FixProxyMode(pyblish.api.Action): @@ -7,7 +8,7 @@ class FixProxyMode(pyblish.api.Action): Togger off proxy switch OFF """ - label = "Proxy toggle to OFF" + label = "Repair" icon = "wrench" on = "failed" @@ -30,4 +31,7 @@ class ValidateProxyMode(pyblish.api.ContextPlugin): rootNode = nuke.root() isProxy = rootNode["proxy"].value() - assert not isProxy, "Proxy mode should be toggled OFF" + if isProxy: + raise PublishXmlValidationError( + self, "Proxy mode should be toggled OFF" + ) diff --git 
a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index 5f7b1f3806..237ff423e5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.api import ValidationException import clique +from openpype.pipeline import PublishXmlValidationError @pyblish.api.log @@ -36,7 +36,7 @@ class RepairActionBase(pyblish.api.Action): class RepairCollectionActionToLocal(RepairActionBase): - label = "Repair > rerender with `Local` machine" + label = "Repair - rerender with \"Local\"" def process(self, context, plugin): instances = self.get_instance(context, plugin) @@ -44,7 +44,7 @@ class RepairCollectionActionToLocal(RepairActionBase): class RepairCollectionActionToFarm(RepairActionBase): - label = "Repair > rerender `On farm` with remote machines" + label = "Repair - rerender with \"On farm\"" def process(self, context, plugin): instances = self.get_instance(context, plugin) @@ -63,6 +63,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): def process(self, instance): + f_data = { + "node_name": instance[0]["name"].value() + } + for repre in instance.data["representations"]: if not repre.get("files"): @@ -71,7 +75,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): "Check properties of write node (group) and" "select 'Local' option in 'Publish' dropdown.") self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) if isinstance(repre["files"], str): return @@ -82,21 +87,23 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): collection = collections[0] - fstartH = instance.data["frameStartHandle"] - fendH = instance.data["frameEndHandle"] + f_start_h = instance.data["frameStartHandle"] + f_end_h = instance.data["frameEndHandle"] - frame_length = int(fendH - fstartH + 1) + frame_length = int(f_end_h - f_start_h + 1) if frame_length != 1: if len(collections) != 1: msg = "There are multiple collections in the folder" self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) if not collection.is_contiguous(): msg = "Some frames appear to be missing" self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) collected_frames_len = len(collection.indexes) coll_start = min(collection.indexes) @@ -105,7 +112,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): self.log.info("frame_length: {}".format(frame_length)) self.log.info("collected_frames_len: {}".format( collected_frames_len)) - self.log.info("fstartH-fendH: {}-{}".format(fstartH, fendH)) + self.log.info("f_start_h-f_end_h: {}-{}".format( + f_start_h, f_end_h)) self.log.info( "coll_start-coll_end: {}-{}".format(coll_start, coll_end)) @@ -116,13 +124,19 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): if ("slate" in instance.data["families"]) \ and (frame_length != collected_frames_len): collected_frames_len -= 1 - fstartH += 1 + f_start_h += 1 - assert ((collected_frames_len >= frame_length) - and (coll_start <= fstartH) - and (coll_end >= fendH)), ( - "{} missing frames. 
Use repair to render all frames" - ).format(__name__) + if ( + collected_frames_len < frame_length + or coll_start > f_start_h + or coll_end < f_end_h + ): + raise PublishXmlValidationError( + self, ( + "{} missing frames. Use repair to " + "render all frames" + ).format(__name__), formatting_data=f_data + ) instance.data["collection"] = collection diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py deleted file mode 100644 index b8d7494b9d..0000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ /dev/null @@ -1,156 +0,0 @@ -import pyblish.api - -from openpype.client import get_project, get_asset_by_id, get_asset_by_name -from openpype.pipeline import legacy_io - - -@pyblish.api.log -class ValidateScript(pyblish.api.InstancePlugin): - """ Validates file output. """ - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["workfile"] - label = "Check script settings" - hosts = ["nuke"] - optional = True - - def process(self, instance): - ctx_data = instance.context.data - project_name = legacy_io.active_project() - asset_name = ctx_data["asset"] - # TODO repace query with using 'instance.data["assetEntity"]' - asset = get_asset_by_name(project_name, asset_name) - asset_data = asset["data"] - - # These attributes will be checked - attributes = [ - "fps", - "frameStart", - "frameEnd", - "resolutionWidth", - "resolutionHeight", - "handleStart", - "handleEnd" - ] - - # Value of these attributes can be found on parents - hierarchical_attributes = [ - "fps", - "resolutionWidth", - "resolutionHeight", - "pixelAspect", - "handleStart", - "handleEnd" - ] - - missing_attributes = [] - asset_attributes = {} - for attr in attributes: - if attr in asset_data: - asset_attributes[attr] = asset_data[attr] - - elif attr in hierarchical_attributes: - # TODO this should be probably removed - # Hierarchical attributes is not a thing since Pype 2?
- - # Try to find attribute on parent - parent_id = asset['parent'] - parent_type = "project" - if asset_data['visualParent'] is not None: - parent_type = "asset" - parent_id = asset_data['visualParent'] - - value = self.check_parent_hierarchical( - project_name, parent_type, parent_id, attr - ) - if value is None: - missing_attributes.append(attr) - else: - asset_attributes[attr] = value - else: - missing_attributes.append(attr) - - # Raise error if attributes weren't found on asset in database - if len(missing_attributes) > 0: - atr = ", ".join(missing_attributes) - msg = 'Missing attributes "{}" in asset "{}"' - message = msg.format(atr, asset_name) - raise ValueError(message) - - # Get handles from database, Default is 0 (if not found) - handle_start = 0 - handle_end = 0 - if "handleStart" in asset_attributes: - handle_start = asset_attributes["handleStart"] - if "handleEnd" in asset_attributes: - handle_end = asset_attributes["handleEnd"] - - asset_attributes["fps"] = float("{0:.4f}".format( - asset_attributes["fps"])) - - # Get values from nukescript - script_attributes = { - "handleStart": ctx_data["handleStart"], - "handleEnd": ctx_data["handleEnd"], - "fps": float("{0:.4f}".format(ctx_data["fps"])), - "frameStart": ctx_data["frameStart"], - "frameEnd": ctx_data["frameEnd"], - "resolutionWidth": ctx_data["resolutionWidth"], - "resolutionHeight": ctx_data["resolutionHeight"], - "pixelAspect": ctx_data["pixelAspect"] - } - - # Compare asset's values Nukescript X Database - not_matching = [] - for attr in attributes: - self.log.debug("asset vs script attribute \"{}\": {}, {}".format( - attr, asset_attributes[attr], script_attributes[attr]) - ) - if asset_attributes[attr] != script_attributes[attr]: - not_matching.append(attr) - - # Raise error if not matching - if len(not_matching) > 0: - msg = "Attributes '{}' are not set correctly" - # Alert user that handles are set if Frame start/end not match - if ( - (("frameStart" in not_matching) or ("frameEnd" in not_matching)) and - ((handle_start > 0) or (handle_end > 0)) - ): - msg += " (`handle_start` are set to {})".format(handle_start) - msg += " (`handle_end` are set to {})".format(handle_end) - message = msg.format(", ".join(not_matching)) - raise ValueError(message) - - def check_parent_hierarchical( - self, project_name, parent_type, parent_id, attr - ): - if parent_id is None: - return None - - doc = None - if parent_type == "project": - doc = get_project(project_name) - elif parent_type == "asset": - doc = get_asset_by_id(project_name, parent_id) - - if not doc: - return None - - doc_data = doc["data"] - if attr in doc_data: - self.log.info(attr) - return doc_data[attr] - - if parent_type == "project": - return None - - parent_id = doc_data.get("visualParent") - new_parent_type = "asset" - if parent_id is None: - parent_id = doc["parent"] - new_parent_type = "project" - - return self.check_parent_hierarchical( - project_name, new_parent_type, parent_id, attr - ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py new file mode 100644 index 0000000000..f0632f8080 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -0,0 +1,127 @@ +from pprint import pformat +import pyblish.api + +from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import RepairAction +from openpype.hosts.nuke.api.lib import ( + get_avalon_knob_data, + WorkfileSettings +) +import nuke + + 
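The validator defined next boils down to a dictionary comparison between attributes from the asset document and values read from nuke.root(). Stripped of the Nuke calls, the core logic looks roughly like this (the values are made up):

    asset_attributes = {"fps": 25.0, "frameStart": 991, "frameEnd": 1120}
    script_attributes = {"fps": 24.0, "frameStart": 991, "frameEnd": 1120}

    not_matching = [
        {"name": key, "expected": expected, "actual": script_attributes[key]}
        for key, expected in asset_attributes.items()
        if script_attributes[key] != expected
    ]
    print(not_matching)  # [{'name': 'fps', 'expected': 25.0, 'actual': 24.0}]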
+@pyblish.api.log +class ValidateScriptAttributes(pyblish.api.InstancePlugin): + """ Validate script frame range and resolution attributes against asset data. """ + + order = pyblish.api.ValidatorOrder + 0.1 + families = ["workfile"] + label = "Validate script attributes" + hosts = ["nuke"] + optional = True + actions = [RepairAction] + + def process(self, instance): + root = nuke.root() + knob_data = get_avalon_knob_data(root) + asset = instance.data["assetEntity"] + # get asset data frame values + frame_start = asset["data"]["frameStart"] + frame_end = asset["data"]["frameEnd"] + handle_start = asset["data"]["handleStart"] + handle_end = asset["data"]["handleEnd"] + + # These attributes will be checked + attributes = [ + "fps", + "frameStart", + "frameEnd", + "resolutionWidth", + "resolutionHeight", + "handleStart", + "handleEnd" + ] + + # get only defined attributes from asset data + asset_attributes = { + attr: asset["data"][attr] + for attr in attributes + if attr in asset["data"] + } + # round fps to max 4 digits (only for evaluation) + fps_data = float("{0:.4f}".format( + asset_attributes["fps"])) + # fix frame values to include handles + asset_attributes.update({ + "frameStart": frame_start - handle_start, + "frameEnd": frame_end + handle_end, + "fps": fps_data + }) + + self.log.debug(pformat( + asset_attributes + )) + + # Get format + _format = root["format"].value() + + # Get values from nukescript + script_attributes = { + "handleStart": int(knob_data["handleStart"]), + "handleEnd": int(knob_data["handleEnd"]), + "fps": float("{0:.4f}".format(root['fps'].value())), + "frameStart": int(root["first_frame"].getValue()), + "frameEnd": int(root["last_frame"].getValue()), + "resolutionWidth": _format.width(), + "resolutionHeight": _format.height(), + "pixelAspect": _format.pixelAspect() + } + self.log.debug(pformat( + script_attributes + )) + + # Compare Nuke script values against asset values from the database + not_matching = [] + for attr in attributes: + self.log.debug( + "Asset vs Script attribute \"{}\": {}, {}".format( + attr, + asset_attributes[attr], + script_attributes[attr] + ) + ) + if asset_attributes[attr] != script_attributes[attr]: + not_matching.append({ + "name": attr, + "expected": asset_attributes[attr], + "actual": script_attributes[attr] + }) + + # Raise error if not matching + if not_matching: + msg = "The following attributes are not set correctly: \n{}" + attrs_wrong_str = "\n".join([ + ( + "`{0}` is set to `{1}`, " + "but should be set to `{2}`" + ).format(at["name"], at["actual"], at["expected"]) + for at in not_matching + ]) + attrs_wrong_html = "<br/>
".join([ + ( + "-- __{0}__ is set to __{1}__, " + "but should be set to __{2}__" + ).format(at["name"], at["actual"], at["expected"]) + for at in not_matching + ]) + raise PublishXmlValidationError( + self, msg.format(attrs_wrong_str), + formatting_data={ + "failed_attributes": attrs_wrong_html + } + ) + + @classmethod + def repair(cls, instance): + cls.log.debug("__ repairing instance: {}".format(instance)) + WorkfileSettings().set_context_settings() diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py index 9fb57c1698..699526ef57 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py @@ -3,8 +3,9 @@ import toml import nuke import pyblish.api -import openpype.api + from openpype.pipeline import discover_creator_plugins +from openpype.pipeline.publish import RepairAction from openpype.hosts.nuke.api.lib import get_avalon_knob_data @@ -16,7 +17,7 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin): families = ["write"] label = "Validate Write Legacy" hosts = ["nuke"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): node = instance[0] diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index c0d5c8f402..3e2881f298 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,10 +1,11 @@ -import os import pyblish.api -import openpype.utils +from openpype.pipeline.publish import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, - get_node_path + set_node_knobs_from_settings, + color_gui_to_int ) +from openpype.pipeline import PublishXmlValidationError @pyblish.api.log @@ -14,18 +15,29 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): icon = "wrench" def process(self, context, plugin): - instances = openpype.utils.filter_instances(context, plugin) + instances = get_errored_instances_from_context(context) for instance in instances: - node = instance[1] - correct_data = get_write_node_template_attr(node) - for k, v in correct_data.items(): - node[k].setValue(v) + write_group_node = instance[0] + # get write node from inside of group + write_node = None + for x in instance: + if x.Class() == "Write": + write_node = x + + correct_data = get_write_node_template_attr(write_group_node) + + set_node_knobs_from_settings(write_node, correct_data["knobs"]) + self.log.info("Node attributes were fixed") class ValidateNukeWriteNode(pyblish.api.InstancePlugin): - """ Validates file output. """ + """ Validate Write node's knobs. + + Compare knobs on write node inside the render group + with settings. At the moment supporting only `file` knob. 
+ """ order = pyblish.api.ValidatorOrder optional = True @@ -35,38 +47,75 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): hosts = ["nuke"] def process(self, instance): + write_group_node = instance[0] - node = instance[1] - correct_data = get_write_node_template_attr(node) + # get write node from inside of group + write_node = None + for x in instance: + if x.Class() == "Write": + write_node = x + + if write_node is None: + return + + correct_data = get_write_node_template_attr(write_group_node) + + if correct_data: + check_knobs = correct_data["knobs"] + else: + return check = [] - for k, v in correct_data.items(): - if k is 'file': - padding = len(v.split('#')) - ref_path = get_node_path(v, padding) - n_path = get_node_path(node[k].value(), padding) - isnt = False - for i, p in enumerate(ref_path): - if str(n_path[i]) not in str(p): - if not isnt: - isnt = True - else: - continue - if isnt: - check.append([k, v, node[k].value()]) + self.log.debug("__ write_node: {}".format( + write_node + )) + + for knob_data in check_knobs: + key = knob_data["name"] + value = knob_data["value"] + node_value = write_node[key].value() + + # fix type differences + if type(node_value) in (int, float): + try: + if isinstance(value, list): + value = color_gui_to_int(value) + else: + value = float(value) + node_value = float(node_value) + except ValueError: + value = str(value) else: - if str(node[k].value()) not in str(v): - check.append([k, v, node[k].value()]) + value = str(value) + node_value = str(node_value) + + self.log.debug("__ key: {} | value: {}".format( + key, value + )) + if ( + node_value != value + and key != "file" + and key != "tile_color" + ): + check.append([key, value, write_node[key].value()]) self.log.info(check) - msg = "Node's attribute `{0}` is not correct!\n" \ - "\nCorrect: `{1}` \n\nWrong: `{2}` \n\n" - if check: - print_msg = "" - for item in check: - print_msg += msg.format(item[0], item[1], item[2]) - print_msg += "`RMB` click to the validator and `A` to fix!" + self._make_error(check) - assert not check, print_msg + def _make_error(self, check): + # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block + dbg_msg = "Write node's knobs values are not correct!\n" + msg_add = "Knob '{0}' > Correct: `{1}` > Wrong: `{2}`" + + details = [ + msg_add.format(item[0], item[1], item[2]) + for item in check + ] + xml_msg = "
".join(details) + dbg_msg += "\n\t".join(details) + + raise PublishXmlValidationError( + self, dbg_msg, formatting_data={"xml_msg": xml_msg} + ) diff --git a/openpype/hosts/nuke/startup/clear_rendered.py b/openpype/hosts/nuke/startup/clear_rendered.py index cf1d8ce170..744af71034 100644 --- a/openpype/hosts/nuke/startup/clear_rendered.py +++ b/openpype/hosts/nuke/startup/clear_rendered.py @@ -1,10 +1,11 @@ import os -from openpype.api import Logger -log = Logger().get_logger(__name__) +from openpype.lib import Logger def clear_rendered(dir_path): + log = Logger.get_logger(__name__) + for _f in os.listdir(dir_path): _f_path = os.path.join(dir_path, _f) log.info("Removing: `{}`".format(_f_path)) diff --git a/openpype/hosts/nuke/startup/menu.py b/openpype/hosts/nuke/startup/menu.py index 1461d41385..5e29121e9b 100644 --- a/openpype/hosts/nuke/startup/menu.py +++ b/openpype/hosts/nuke/startup/menu.py @@ -1,7 +1,7 @@ import nuke import os -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import install_host from openpype.hosts.nuke import api from openpype.hosts.nuke.api.lib import ( diff --git a/openpype/hosts/nuke/startup/write_to_read.py b/openpype/hosts/nuke/startup/write_to_read.py index f5cf66b357..b7add40f47 100644 --- a/openpype/hosts/nuke/startup/write_to_read.py +++ b/openpype/hosts/nuke/startup/write_to_read.py @@ -2,8 +2,8 @@ import re import os import glob import nuke -from openpype.api import Logger -log = Logger().get_logger(__name__) +from openpype.lib import Logger +log = Logger.get_logger(__name__) SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v', 'm2v'] diff --git a/openpype/hosts/nuke/vendor/google/protobuf/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/__init__.py new file mode 100644 index 0000000000..03f3b29ee7 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. + +__version__ = '3.20.1' diff --git a/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py new file mode 100644 index 0000000000..9121193d11 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/any.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _ANY._serialized_start=46 + _ANY._serialized_end=84 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py new file mode 100644 index 0000000000..1721b10a75 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/api.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _API._serialized_start=113 + _API._serialized_end=370 + _METHOD._serialized_start=373 + _METHOD._serialized_end=586 + _MIXIN._serialized_start=588 + _MIXIN._serialized_end=623 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000..715a891370 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/compiler/plugin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' + _VERSION._serialized_start=101 + _VERSION._serialized_end=171 + _CODEGENERATORREQUEST._serialized_start=174 + _CODEGENERATORREQUEST._serialized_end=360 + _CODEGENERATORRESPONSE._serialized_start=363 + _CODEGENERATORRESPONSE._serialized_end=684 + _CODEGENERATORRESPONSE_FILE._serialized_start=499 + _CODEGENERATORRESPONSE_FILE._serialized_end=626 + _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 + _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py new file mode 100644 index 0000000000..ad70be9a11 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py @@ -0,0 +1,1224 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import threading +import warnings + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import binascii + import os + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = True + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. + DescriptorMetaclass = type + + +class _Lock(object): + """Wrapper class of threading.Lock(), which is allowed by 'with'.""" + + def __new__(cls): + self = object.__new__(cls) + self._lock = threading.Lock() # pylint: disable=protected-access + return self + + def __enter__(self): + self._lock.acquire() + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + +_lock = threading.Lock() + + +def _Deprecated(name): + if _Deprecated.count > 0: + _Deprecated.count -= 1 + warnings.warn( + 'Call to deprecated create function %s(). Note: Create unlinked ' + 'descriptors is going to go away. Please use get/find descriptors from ' + 'generated code or query the descriptor_pool.' + % name, + category=DeprecationWarning, stacklevel=3) + + +# Deprecated warnings will print 100 times at most which should be enough for +# users to notice and do not cause timeout. +_Deprecated.count = 100 + + +_internal_create_key = object() + + +class DescriptorBase(metaclass=DescriptorMetaclass): + + """Descriptors base class. + + This class is the base of all descriptor classes. 
It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, serialized_options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + self._serialized_options = serialized_options + + # Does this descriptor have non-default options? + self.has_options = (options is not None) or (serialized_options is not None) + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, + self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + + with _lock: + if self._serialized_options is None: + self._options = options_class() + else: + self._options = _ParseOptions(options_class(), + self._serialized_options) + + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None, serialized_options=None): + """Constructor. + + Args: + options: Protocol message options or None + to use default message options. + options_class_name (str): The class name of the above options. + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file (FileDescriptor): Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. + serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_options: Protocol message serialized options or None. + """ + super(_NestedDescriptorBase, self).__init__( + options, serialized_options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? 
+ self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldn't be serialized, due to to few constructor + arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + Attributes: + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + containing_type (Descriptor): Reference to the descriptor of the type + containing us, or None if this is top-level. + fields (list[FieldDescriptor]): Field descriptors for all fields in + this type. + fields_by_number (dict(int, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed + by "number" attribute in each FieldDescriptor. + fields_by_name (dict(str, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by + "name" attribute in each :class:`FieldDescriptor`. + nested_types (list[Descriptor]): Descriptor references + for all protocol message types nested within this one. + nested_types_by_name (dict(str, Descriptor)): Same Descriptor + objects as in :attr:`nested_types`, but indexed by "name" attribute + in each Descriptor. + enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references + for all enums contained within this type. + enum_types_by_name (dict(str, EnumDescriptor)): Same + :class:`EnumDescriptor` objects as in :attr:`enum_types`, but + indexed by "name" attribute in each EnumDescriptor. + enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping + from enum value name to :class:`EnumValueDescriptor` for that value. + extensions (list[FieldDescriptor]): All extensions defined directly + within this message type (NOT within a nested type). + extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor + objects as :attr:`extensions`, but indexed by "name" attribute of each + FieldDescriptor. + is_extendable (bool): Does this type define any extension ranges? + oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields + in this message. + oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in + :attr:`oneofs`, but indexed by "name" attribute. + file (FileDescriptor): Reference to file descriptor. 
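+
+    Example (editor's illustration, not in the original docstring): for a
+    generated message class `MyMessage`, the expression
+    `MyMessage.DESCRIPTOR.fields_by_name['my_field'].number` returns the
+    tag number declared for `my_field` in the .proto file.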
+ + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__( + cls, + name=None, + full_name=None, + filename=None, + containing_type=None, + fields=None, + nested_types=None, + enum_types=None, + extensions=None, + options=None, + serialized_options=None, + is_extendable=True, + extension_ranges=None, + oneofs=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + syntax=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + serialized_options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin + syntax=None, create_key=None): + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('Descriptor') + + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... + self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + """Same FieldDescriptor objects as in :attr:`fields`, but indexed by + :attr:`FieldDescriptor.camelcase_name`. + """ + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. 
+ + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. + """ + # This function is overridden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + Attributes: + name (str): Name of this field, exactly as it appears in .proto. + full_name (str): Name of this field, including containing scope. This is + particularly relevant for extensions. + index (int): Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number (int): Tag number declared for this field in the .proto file. + + type (int): (One of the TYPE_* constants below) Declared type. + cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label (int): (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. + has_default_value (bool): True if this field has a default value defined, + otherwise false. + default_value (Varies): Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type (Descriptor): Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. + Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type (Descriptor): If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type (EnumDescriptor): If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope (Descriptor): Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options (descriptor_pb2.FieldOptions): Protocol message field options or + None to use default field options. + + containing_oneof (OneofDescriptor): If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + + file (FileDescriptor): Reference to file descriptor. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
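+  # Editor's note (illustrative, not in the original source): these values
+  # match descriptor_pb2.FieldDescriptorProto.Type; e.g. a field declared
+  # as `optional string name = 1;` reports type == TYPE_STRING (9), which
+  # _PYTHON_TO_CPP_PROTO_TYPE_MAP below maps to cpp_type == CPPTYPE_STRING.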
+ TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. 
+ """ + if create_key is not _internal_create_key: + _Deprecated('FieldDescriptor') + + super(FieldDescriptor, self).__init__( + options, serialized_options, 'FieldOptions') + self.name = name + self.full_name = full_name + self.file = file + self._camelcase_name = None + if json_name is None: + self.json_name = _ToJsonName(name) + else: + self.json_name = json_name + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + """Camelcase name of this field. + + Returns: + str: the name in CamelCase. + """ + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @property + def has_presence(self): + """Whether the field distinguishes between unpopulated and default values. + + Raises: + RuntimeError: singular field that is not linked with message nor file. + """ + if self.label == FieldDescriptor.LABEL_REPEATED: + return False + if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or + self.containing_oneof): + return True + if hasattr(self.file, 'syntax'): + return self.file.syntax == 'proto2' + if hasattr(self.message_type, 'syntax'): + return self.message_type.syntax == 'proto2' + raise RuntimeError( + 'has_presence is not ready to use because field %s is not' + ' linked with message type nor file' % self.full_name) + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. + """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + Attributes: + name (str): Name of the enum type. + full_name (str): Full name of the type, including package name + and any enclosing type(s). + + values (list[EnumValueDescriptor]): List of the values + in this enum. + values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type (Descriptor): Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file (FileDescriptor): Reference to file descriptor. 
+ options (descriptor_pb2.EnumOptions): Enum options message or + None to use default enum options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumDescriptor + + def __new__(cls, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindEnumTypeByName(full_name) + + def __init__(self, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + """Arguments are as described in the attribute description above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('EnumDescriptor') + + super(EnumDescriptor, self).__init__( + options, 'EnumOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + self.values = values + for value in self.values: + value.type = self + self.values_by_name = dict((v.name, v) for v in values) + # Values are reversed to ensure that the first alias is retained. + self.values_by_number = dict((v.number, v) for v in reversed(values)) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.EnumDescriptorProto. + + Args: + proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(EnumDescriptor, self).CopyToProto(proto) + + +class EnumValueDescriptor(DescriptorBase): + + """Descriptor for a single value within an enum. + + Attributes: + name (str): Name of this value. + index (int): Dense, 0-indexed index giving the order that this + value appears textually within its enum in the .proto file. + number (int): Actual number assigned to this enum value. + type (EnumDescriptor): :class:`EnumDescriptor` to which this value + belongs. Set by :class:`EnumDescriptor`'s constructor if we're + passed into one. + options (descriptor_pb2.EnumValueOptions): Enum value options message or + None to use default enum value options options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor + + def __new__(cls, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + # There is no way we can build a complete EnumValueDescriptor with the + # given parameters (the name of the Enum is not known, for example). + # Fortunately generated files just pass it to the EnumDescriptor() + # constructor, which will ignore it, so returning None is good enough. 
+ return None + + def __init__(self, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('EnumValueDescriptor') + + super(EnumValueDescriptor, self).__init__( + options, serialized_options, 'EnumValueOptions') + self.name = name + self.index = index + self.number = number + self.type = type + + +class OneofDescriptor(DescriptorBase): + """Descriptor for a oneof field. + + Attributes: + name (str): Name of the oneof field. + full_name (str): Full name of the oneof field, including package name. + index (int): 0-based index giving the order of the oneof field inside + its containing type. + containing_type (Descriptor): :class:`Descriptor` of the protocol message + type that contains this field. Set by the :class:`Descriptor` constructor + if we're passed into one. + fields (list[FieldDescriptor]): The list of field descriptors this + oneof can contain. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.OneofDescriptor + + def __new__( + cls, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindOneofByName(full_name) + + def __init__( + self, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('OneofDescriptor') + + super(OneofDescriptor, self).__init__( + options, serialized_options, 'OneofOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_type = containing_type + self.fields = fields + + +class ServiceDescriptor(_NestedDescriptorBase): + + """Descriptor for a service. + + Attributes: + name (str): Name of the service. + full_name (str): Full name of the service, including package name. + index (int): 0-indexed index giving the order that this services + definition appears within the .proto file. + methods (list[MethodDescriptor]): List of methods provided by this + service. + methods_by_name (dict(str, MethodDescriptor)): Same + :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but + indexed by "name" attribute in each :class:`MethodDescriptor`. + options (descriptor_pb2.ServiceOptions): Service options message or + None to use default service options. + file (FileDescriptor): Reference to file info. 
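+
+    Example (editor's illustration): for a service `Greeter` defining a
+    method `SayHello`, `service_descriptor.FindMethodByName('SayHello')`
+    returns the matching MethodDescriptor, and None when no such method
+    exists.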
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor + + def __new__( + cls, + name=None, + full_name=None, + index=None, + methods=None, + options=None, + serialized_options=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindServiceByName(full_name) + + def __init__(self, name, full_name, index, methods, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + if create_key is not _internal_create_key: + _Deprecated('ServiceDescriptor') + + super(ServiceDescriptor, self).__init__( + options, 'ServiceOptions', name, full_name, file, + None, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + self.index = index + self.methods = methods + self.methods_by_name = dict((m.name, m) for m in methods) + # Set the containing service for each method in this service. + for method in self.methods: + method.containing_service = self + + def FindMethodByName(self, name): + """Searches for the specified method, and returns its descriptor. + + Args: + name (str): Name of the method. + Returns: + MethodDescriptor or None: the descriptor for the requested method, if + found. + """ + return self.methods_by_name.get(name, None) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.ServiceDescriptorProto. + + Args: + proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(ServiceDescriptor, self).CopyToProto(proto) + + +class MethodDescriptor(DescriptorBase): + + """Descriptor for a method in a service. + + Attributes: + name (str): Name of the method within the service. + full_name (str): Full name of method. + index (int): 0-indexed index of the method inside the service. + containing_service (ServiceDescriptor): The service that contains this + method. + input_type (Descriptor): The descriptor of the message that this method + accepts. + output_type (Descriptor): The descriptor of the message that this method + returns. + client_streaming (bool): Whether this method uses client streaming. + server_streaming (bool): Whether this method uses server streaming. + options (descriptor_pb2.MethodOptions or None): Method options message, or + None to use default method options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.MethodDescriptor + + def __new__(cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindMethodByName(full_name) + + def __init__(self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + """The arguments are as described in the description of MethodDescriptor + attributes above. + + Note that containing_service may be None, and may be set later if necessary. 
+ """ + if create_key is not _internal_create_key: + _Deprecated('MethodDescriptor') + + super(MethodDescriptor, self).__init__( + options, serialized_options, 'MethodOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_service = containing_service + self.input_type = input_type + self.output_type = output_type + self.client_streaming = client_streaming + self.server_streaming = server_streaming + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.MethodDescriptorProto. + + Args: + proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. + + Raises: + Error: If self couldn't be serialized, due to too few constructor + arguments. + """ + if self.containing_service is not None: + from google.protobuf import descriptor_pb2 + service_proto = descriptor_pb2.ServiceDescriptorProto() + self.containing_service.CopyToProto(service_proto) + proto.CopyFrom(service_proto.method[self.index]) + else: + raise Error('Descriptor does not contain a service.') + + +class FileDescriptor(DescriptorBase): + """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. + + Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and + :attr:`dependencies` fields are only set by the + :py:mod:`google.protobuf.message_factory` module, and not by the generated + proto code. + + Attributes: + name (str): Name of file, relative to root of source tree. + package (str): Name of the package + syntax (str): string indicating syntax of the file (can be "proto2" or + "proto3") + serialized_pb (bytes): Byte string of serialized + :class:`descriptor_pb2.FileDescriptorProto`. + dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` + objects this :class:`FileDescriptor` depends on. + public_dependencies (list[FileDescriptor]): A subset of + :attr:`dependencies`, which were declared as "public". + message_types_by_name (dict(str, Descriptor)): Mapping from message names + to their :class:`Descriptor`. + enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to + their :class:`EnumDescriptor`. + extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension + names declared at file scope to their :class:`FieldDescriptor`. + services_by_name (dict(str, ServiceDescriptor)): Mapping from services' + names to their :class:`ServiceDescriptor`. + pool (DescriptorPool): The pool this descriptor belongs to. When not + passed to the constructor, the global default pool is used. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FileDescriptor + + def __new__(cls, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + # FileDescriptor() is called from various places, not only from generated + # files, to register dynamic proto files and messages. 
+ # pylint: disable=g-explicit-bool-comparison + if serialized_pb == b'': + # Cpp generated code must be linked in if serialized_pb is '' + try: + return _message.default_pool.FindFileByName(name) + except KeyError: + raise RuntimeError('Please link in cpp generated lib for %s' % (name)) + elif serialized_pb: + return _message.default_pool.AddSerializedFile(serialized_pb) + else: + return super(FileDescriptor, cls).__new__(cls) + + def __init__(self, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + """Constructor.""" + if create_key is not _internal_create_key: + _Deprecated('FileDescriptor') + + super(FileDescriptor, self).__init__( + options, serialized_options, 'FileOptions') + + if pool is None: + from google.protobuf import descriptor_pool + pool = descriptor_pool.Default() + self.pool = pool + self.message_types_by_name = {} + self.name = name + self.package = package + self.syntax = syntax or "proto2" + self.serialized_pb = serialized_pb + + self.enum_types_by_name = {} + self.extensions_by_name = {} + self.services_by_name = {} + self.dependencies = (dependencies or []) + self.public_dependencies = (public_dependencies or []) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.FileDescriptorProto. + + Args: + proto: An empty descriptor_pb2.FileDescriptorProto. + """ + proto.ParseFromString(self.serialized_pb) + + +def _ParseOptions(message, string): + """Parses serialized options. + + This helper function is used to parse serialized options in generated + proto2 files. It must not be used outside proto2. + """ + message.ParseFromString(string) + return message + + +def _ToCamelCase(name): + """Converts name to camel-case and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + if result: + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + # Lower-case the first letter. + if result and result[0].isupper(): + result[0] = result[0].lower() + return ''.join(result) + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _ToJsonName(name): + """Converts name to Json name and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + return ''.join(result) + + +def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, + syntax=None): + """Make a protobuf Descriptor given a DescriptorProto protobuf. + + Handles nested descriptors. Note that this is limited to the scope of defining + a message inside of another message. Composite fields can currently only be + resolved if the message is defined in the same scope as the field. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: Optional package name for the new message Descriptor (string). + build_file_if_cpp: Update the C++ descriptor pool if api matches. + Set to False on recursion, so no duplicates are created. + syntax: The syntax/semantics that should be used. Set to "proto3" to get + proto3 field presence semantics. + Returns: + A Descriptor for protobuf messages. 
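+
+    Example (editor's sketch, not part of the original module; assumes
+    the default descriptor pool is usable):
+
+      from google.protobuf import descriptor_pb2
+      proto = descriptor_pb2.DescriptorProto()
+      proto.name = 'Pair'
+      field = proto.field.add()
+      field.name = 'key'
+      field.number = 1
+      field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
+      field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
+      pair_desc = MakeDescriptor(proto)
+      assert pair_desc.fields_by_name['key'].number == 1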
+ """ + if api_implementation.Type() == 'cpp' and build_file_if_cpp: + # The C++ implementation requires all descriptors to be backed by the same + # definition in the C++ descriptor pool. To do this, we build a + # FileDescriptorProto with the same definition as this descriptor and build + # it into the pool. + from google.protobuf import descriptor_pb2 + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.message_type.add().MergeFrom(desc_proto) + + # Generate a random name for this proto file to prevent conflicts with any + # imported ones. We need to specify a file name so the descriptor pool + # accepts our FileDescriptorProto, but it is not important what that file + # name is actually set to. + proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') + + if package: + file_descriptor_proto.name = os.path.join(package.replace('.', '/'), + proto_name + '.proto') + file_descriptor_proto.package = package + else: + file_descriptor_proto.name = proto_name + '.proto' + + _message.default_pool.Add(file_descriptor_proto) + result = _message.default_pool.FindFileByName(file_descriptor_proto.name) + + if _USE_C_DESCRIPTORS: + return result.message_types_by_name[desc_proto.name] + + full_message_name = [desc_proto.name] + if package: full_message_name.insert(0, package) + + # Create Descriptors for enum types + enum_types = {} + for enum_proto in desc_proto.enum_type: + full_name = '.'.join(full_message_name + [enum_proto.name]) + enum_desc = EnumDescriptor( + enum_proto.name, full_name, None, [ + EnumValueDescriptor(enum_val.name, ii, enum_val.number, + create_key=_internal_create_key) + for ii, enum_val in enumerate(enum_proto.value)], + create_key=_internal_create_key) + enum_types[full_name] = enum_desc + + # Create Descriptors for nested types + nested_types = {} + for nested_proto in desc_proto.nested_type: + full_name = '.'.join(full_message_name + [nested_proto.name]) + # Nested types are just those defined inside of the message, not all types + # used by fields in the message, so no loops are possible here. 
+    nested_desc = MakeDescriptor(nested_proto,
+                                 package='.'.join(full_message_name),
+                                 build_file_if_cpp=False,
+                                 syntax=syntax)
+    nested_types[full_name] = nested_desc
+
+  fields = []
+  for field_proto in desc_proto.field:
+    full_name = '.'.join(full_message_name + [field_proto.name])
+    enum_desc = None
+    nested_desc = None
+    if field_proto.json_name:
+      json_name = field_proto.json_name
+    else:
+      json_name = None
+    if field_proto.HasField('type_name'):
+      type_name = field_proto.type_name
+      full_type_name = '.'.join(full_message_name +
+                                [type_name[type_name.rfind('.')+1:]])
+      if full_type_name in nested_types:
+        nested_desc = nested_types[full_type_name]
+      elif full_type_name in enum_types:
+        enum_desc = enum_types[full_type_name]
+      # Else type_name references a non-local type, which isn't implemented
+    field = FieldDescriptor(
+        field_proto.name, full_name, field_proto.number - 1,
+        field_proto.number, field_proto.type,
+        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
+        field_proto.label, None, nested_desc, enum_desc, None, False, None,
+        options=_OptionsOrNone(field_proto), has_default_value=False,
+        json_name=json_name, create_key=_internal_create_key)
+    fields.append(field)
+
+  desc_name = '.'.join(full_message_name)
+  return Descriptor(desc_proto.name, desc_name, None, None, fields,
+                    list(nested_types.values()), list(enum_types.values()), [],
+                    options=_OptionsOrNone(desc_proto),
+                    create_key=_internal_create_key)
diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py
new file mode 100644
index 0000000000..073eddc711
--- /dev/null
+++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py
@@ -0,0 +1,177 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
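For orientation, a minimal usage sketch of the MakeDescriptor() helper from the descriptor.py hunk above; the 'Point' message and its fields are invented for illustration, and the import path assumes the vendored package is importable as google.protobuf:

    # Hypothetical example; the message and field names are made up.
    from google.protobuf import descriptor_pb2
    from google.protobuf.descriptor import MakeDescriptor

    proto = descriptor_pb2.DescriptorProto()
    proto.name = 'Point'
    for number, field_name in enumerate(('x', 'y'), start=1):
      field = proto.field.add()
      field.name = field_name
      field.number = number
      field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
      field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    desc = MakeDescriptor(proto, package='example')
    print(desc.full_name)                 # example.Point
    print([f.name for f in desc.fields])  # ['x', 'y']

Under the C++ implementation, the same call registers the message in the default pool under a randomly named .proto file, as the build_file_if_cpp branch above shows.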
+ +"""Provides a container for DescriptorProtos.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import warnings + + +class Error(Exception): + pass + + +class DescriptorDatabaseConflictingDefinitionError(Error): + """Raised when a proto is added with the same name & different descriptor.""" + + +class DescriptorDatabase(object): + """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" + + def __init__(self): + self._file_desc_protos_by_file = {} + self._file_desc_protos_by_symbol = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this database. + + Args: + file_desc_proto: The FileDescriptorProto to add. + Raises: + DescriptorDatabaseConflictingDefinitionError: if an attempt is made to + add a proto with the same name but different definition than an + existing proto in the database. + """ + proto_name = file_desc_proto.name + if proto_name not in self._file_desc_protos_by_file: + self._file_desc_protos_by_file[proto_name] = file_desc_proto + elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: + raise DescriptorDatabaseConflictingDefinitionError( + '%s already added, but with different descriptor.' % proto_name) + else: + return + + # Add all the top-level descriptors to the index. + package = file_desc_proto.package + for message in file_desc_proto.message_type: + for name in _ExtractSymbols(message, package): + self._AddSymbol(name, file_desc_proto) + for enum in file_desc_proto.enum_type: + self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) + for enum_value in enum.value: + self._file_desc_protos_by_symbol[ + '.'.join((package, enum_value.name))] = file_desc_proto + for extension in file_desc_proto.extension: + self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) + for service in file_desc_proto.service: + self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) + + def FindFileByName(self, name): + """Finds the file descriptor proto by file name. + + Typically the file name is a relative path ending to a .proto file. The + proto with the given name will have to have been added to this database + using the Add method or else an error will be raised. + + Args: + name: The file name to find. + + Returns: + The file descriptor proto matching the name. + + Raises: + KeyError if no file by the given name was added. + """ + + return self._file_desc_protos_by_file[name] + + def FindFileContainingSymbol(self, symbol): + """Finds the file descriptor proto containing the specified symbol. + + The symbol should be a fully qualified name including the file descriptor's + package and any containing messages. Some examples: + + 'some.package.name.Message' + 'some.package.name.Message.NestedEnum' + 'some.package.name.Message.some_field' + + The file descriptor proto containing the specified symbol must be added to + this database using the Add method or else an error will be raised. + + Args: + symbol: The fully qualified symbol name. + + Returns: + The file descriptor proto containing the symbol. + + Raises: + KeyError if no file contains the specified symbol. + """ + try: + return self._file_desc_protos_by_symbol[symbol] + except KeyError: + # Fields, enum values, and nested extensions are not in + # _file_desc_protos_by_symbol. Try to find the top level + # descriptor. Non-existent nested symbol under a valid top level + # descriptor can also be found. The behavior is the same with + # protobuf C++. 
+      top_level, _, _ = symbol.rpartition('.')
+      try:
+        return self._file_desc_protos_by_symbol[top_level]
+      except KeyError:
+        # Raise the original symbol as a KeyError for better diagnostics.
+        raise KeyError(symbol)
+
+  def FindFileContainingExtension(self, extendee_name, extension_number):
+    # TODO(jieluo): implement this API.
+    return None
+
+  def FindAllExtensionNumbers(self, extendee_name):
+    # TODO(jieluo): implement this API.
+    return []
+
+  def _AddSymbol(self, name, file_desc_proto):
+    if name in self._file_desc_protos_by_symbol:
+      warn_msg = ('Conflicting registration for file "' + file_desc_proto.name +
+                  '": ' + name +
+                  ' is already defined in file "' +
+                  self._file_desc_protos_by_symbol[name].name + '"')
+      warnings.warn(warn_msg, RuntimeWarning)
+    self._file_desc_protos_by_symbol[name] = file_desc_proto
+
+
+def _ExtractSymbols(desc_proto, package):
+  """Pulls out all the symbols from a descriptor proto.
+
+  Args:
+    desc_proto: The proto to extract symbols from.
+    package: The package containing the descriptor type.
+
+  Yields:
+    Each fully qualified name found in the descriptor.
+  """
+  message_name = package + '.' + desc_proto.name if package else desc_proto.name
+  yield message_name
+  for nested_type in desc_proto.nested_type:
+    for symbol in _ExtractSymbols(nested_type, message_name):
+      yield symbol
+  for enum_type in desc_proto.enum_type:
+    yield '.'.join((message_name, enum_type.name))
diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py
new file mode 100644
index 0000000000..f570386432
--- /dev/null
+++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py
@@ -0,0 +1,1925 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
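Likewise, a short hypothetical sketch of the DescriptorDatabase from the descriptor_database.py hunk above (the file, package, and message names are invented):

    from google.protobuf import descriptor_pb2
    from google.protobuf.descriptor_database import DescriptorDatabase

    db = DescriptorDatabase()
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/point.proto'
    file_proto.package = 'example'
    file_proto.message_type.add().name = 'Point'
    db.Add(file_proto)

    assert db.FindFileByName('example/point.proto') is file_proto
    assert db.FindFileContainingSymbol('example.Point') is file_proto
    # Nested symbols fall back to their top-level message:
    assert db.FindFileContainingSymbol('example.Point.x') is file_proto

Adding the same FileDescriptorProto twice is a no-op; a different proto under an already-registered name raises DescriptorDatabaseConflictingDefinitionError.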
+# source: google/protobuf/descriptor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' + ) +else: + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') + +if _descriptor._USE_C_DESCRIPTORS == False: + _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=3, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
_descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + + _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + + _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + + _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + 
name='STRING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + + _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( + name='IdempotencyLevel', + full_name='google.protobuf.MethodOptions.IdempotencyLevel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='IDEMPOTENCY_UNKNOWN', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NO_SIDE_EFFECTS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='IDEMPOTENT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) + + + _FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( + name='ExtensionRangeOptions', + full_name='google.protobuf.ExtensionRangeOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( + name='EnumReservedRange', + full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + 
full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, + number=42, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
+ number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, + number=39, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, + number=40, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, + number=41, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, + number=44, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, + number=45, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, + number=15, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ONEOFOPTIONS = _descriptor.Descriptor( + name='OneofOptions', + full_name='google.protobuf.OneofOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + 
full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, + number=34, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _METHODOPTIONS_IDEMPOTENCYLEVEL, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='google.protobuf.GeneratedCodeInfo.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _GENERATEDCODEINFO = _descriptor.Descriptor( + name='GeneratedCodeInfo', + full_name='google.protobuf.GeneratedCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS + _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO + _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS + _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE + _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS + _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE + _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL + _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE + _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS + _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO + _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO + _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS + _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE + _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS + _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO + _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS + _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS + _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE + _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS + _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE + _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE + _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS + _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS + _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL + _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS + _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION + _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART + _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO + _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION + _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO + _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION + DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET + DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO + 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS + DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS + DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS + DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS + DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS + DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS + DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS + DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION + DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + +else: + _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _FILEDESCRIPTORSET._serialized_start=53 + _FILEDESCRIPTORSET._serialized_end=124 + _FILEDESCRIPTORPROTO._serialized_start=127 + _FILEDESCRIPTORPROTO._serialized_end=602 + _DESCRIPTORPROTO._serialized_start=605 + _DESCRIPTORPROTO._serialized_end=1286 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 + _EXTENSIONRANGEOPTIONS._serialized_start=1288 + _EXTENSIONRANGEOPTIONS._serialized_end=1391 + _FIELDDESCRIPTORPROTO._serialized_start=1394 + _FIELDDESCRIPTORPROTO._serialized_end=2119 + _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 + _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 + _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 + _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 + _ONEOFDESCRIPTORPROTO._serialized_start=2121 + _ONEOFDESCRIPTORPROTO._serialized_end=2205 + _ENUMDESCRIPTORPROTO._serialized_start=2208 + _ENUMDESCRIPTORPROTO._serialized_end=2500 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 + _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 + _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 + _SERVICEDESCRIPTORPROTO._serialized_start=2613 + _SERVICEDESCRIPTORPROTO._serialized_end=2757 + _METHODDESCRIPTORPROTO._serialized_start=2760 + _METHODDESCRIPTORPROTO._serialized_end=2953 + _FILEOPTIONS._serialized_start=2956 + _FILEOPTIONS._serialized_end=3761 + _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 + _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 + _MESSAGEOPTIONS._serialized_start=3764 + _MESSAGEOPTIONS._serialized_end=4024 + _FIELDOPTIONS._serialized_start=4027 + _FIELDOPTIONS._serialized_end=4473 + _FIELDOPTIONS_CTYPE._serialized_start=4354 + _FIELDOPTIONS_CTYPE._serialized_end=4401 + _FIELDOPTIONS_JSTYPE._serialized_start=4403 + _FIELDOPTIONS_JSTYPE._serialized_end=4456 + _ONEOFOPTIONS._serialized_start=4475 + _ONEOFOPTIONS._serialized_end=4569 + _ENUMOPTIONS._serialized_start=4572 + _ENUMOPTIONS._serialized_end=4719 + _ENUMVALUEOPTIONS._serialized_start=4721 + _ENUMVALUEOPTIONS._serialized_end=4846 + _SERVICEOPTIONS._serialized_start=4848 + _SERVICEOPTIONS._serialized_end=4971 + 
_METHODOPTIONS._serialized_start=4974 + _METHODOPTIONS._serialized_end=5275 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 + _UNINTERPRETEDOPTION._serialized_start=5278 + _UNINTERPRETEDOPTION._serialized_end=5564 + _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 + _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 + _SOURCECODEINFO._serialized_start=5567 + _SOURCECODEINFO._serialized_end=5780 + _SOURCECODEINFO_LOCATION._serialized_start=5646 + _SOURCECODEINFO_LOCATION._serialized_end=5780 + _GENERATEDCODEINFO._serialized_start=5783 + _GENERATEDCODEINFO._serialized_end=5950 + _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 + _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py new file mode 100644 index 0000000000..911372a8b0 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py @@ -0,0 +1,1295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example of how to use this class:: + + pool = DescriptorPool() + file_descriptor_protos = [ ...
] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import collections +import warnings + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS  # pylint: disable=protected-access + + +def _Deprecated(func): + """Mark functions as deprecated.""" + + def NewFunc(*args, **kwargs): + warnings.warn( + 'Call to deprecated function %s(). Note: Adding unlinked descriptors ' + 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' + 'instead.' % func.__name__, + category=DeprecationWarning) + return func(*args, **kwargs) + NewFunc.__name__ = func.__name__ + NewFunc.__doc__ = func.__doc__ + NewFunc.__dict__.update(func.__dict__) + return NewFunc + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name (str): The fully-qualified symbol name. + + Returns: + str: The normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of proto buffers. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._service_descriptors = {} + self._file_descriptors = {} + self._toplevel_extensions = {} + # TODO(jieluo): Remove _file_desc_by_toplevel_extension after + # maybe year 2020 for compatibility issue (with 3.4.1 only).
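The module docstring above only sketches the dynamic-descriptor workflow. A minimal, self-contained version of that flow is shown below as a hedged sketch: the file name example.proto, the package example, and the Greeting message are hypothetical, and MessageFactory.GetPrototype() is assumed to be the class-building entry point available in this generation of the library.

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import message_factory

pool = descriptor_pool.DescriptorPool()
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example.proto'  # hypothetical file name
file_proto.package = 'example'     # hypothetical package
message_proto = file_proto.message_type.add()
message_proto.name = 'Greeting'    # hypothetical message
field_proto = message_proto.field.add()
field_proto.name = 'text'
field_proto.number = 1
field_proto.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
pool.Add(file_proto)

# Look up the dynamic descriptor and build a usable message class from it.
greeting_descriptor = pool.FindMessageTypeByName('example.Greeting')
Greeting = message_factory.MessageFactory(pool).GetPrototype(greeting_descriptor)
msg = Greeting(text='hello')
assert Greeting.FromString(msg.SerializeToString()).text == 'hello'

Note that pool.Add() only records the proto in the pool's internal database; the descriptor itself is built lazily on the first FindMessageTypeByName() lookup.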
+ self._file_desc_by_toplevel_extension = {} + self._top_enum_values = {} + # We store extensions in two two-level mappings: The first key is the + # descriptor of the message being extended, the second key is the extension + # full name or its tag number. + self._extensions_by_name = collections.defaultdict(dict) + self._extensions_by_number = collections.defaultdict(dict) + + def _CheckConflictRegister(self, desc, desc_name, file_name): + """Check if the descriptor name conflicts with another of the same name. + + Args: + desc: Descriptor of a message, enum, service, extension or enum value. + desc_name (str): the full name of desc. + file_name (str): The file name of the descriptor. + """ + for register, descriptor_type in [ + (self._descriptors, descriptor.Descriptor), + (self._enum_descriptors, descriptor.EnumDescriptor), + (self._service_descriptors, descriptor.ServiceDescriptor), + (self._toplevel_extensions, descriptor.FieldDescriptor), + (self._top_enum_values, descriptor.EnumValueDescriptor)]: + if desc_name in register: + old_desc = register[desc_name] + if isinstance(old_desc, descriptor.EnumValueDescriptor): + old_file = old_desc.type.file.name + else: + old_file = old_desc.file.name + + if not isinstance(desc, descriptor_type) or ( + old_file != file_name): + error_msg = ('Conflict register for file "' + file_name + + '": ' + desc_name + + ' is already defined in file "' + + old_file + '". Please fix the conflict by adding a ' + 'package name to the proto file, or use a different ' + 'name for the duplicate.') + if isinstance(desc, descriptor.EnumValueDescriptor): + error_msg += ('\nNote: enum values appear as ' + 'siblings of the enum type instead of ' + 'children of it.') + + raise TypeError(error_msg) + + return + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto (FileDescriptorProto): The file descriptor to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto (bytes): A bytes string, serialization of the + :class:`FileDescriptorProto` to add. + + Returns: + FileDescriptor: Descriptor for the added file. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) + file_desc.serialized_pb = serialized_file_desc_proto + return file_desc + + # Adding a Descriptor to the descriptor pool is deprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddDescriptor(self, desc): + self._AddDescriptor(desc) + + # Never call this method. It is for internal usage only. + def _AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + + self._descriptors[desc.full_name] = desc + self._AddFileDescriptor(desc.file) + + # Adding an EnumDescriptor to the descriptor pool is deprecated.
Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddEnumDescriptor(self, enum_desc): + self._AddEnumDescriptor(enum_desc) + + # Never call this method. It is for internal usage only. + def _AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the enum. + + Args: + enum_desc: An EnumDescriptor. + """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + file_name = enum_desc.file.name + self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) + self._enum_descriptors[enum_desc.full_name] = enum_desc + + # Top enum values need to be indexed. + # Count the number of dots to see whether the enum is toplevel or nested + # in a message. We cannot use enum_desc.containing_type at this stage. + if enum_desc.file.package: + top_level = (enum_desc.full_name.count('.') + - enum_desc.file.package.count('.') == 1) + else: + top_level = enum_desc.full_name.count('.') == 0 + if top_level: + file_name = enum_desc.file.name + package = enum_desc.file.package + for enum_value in enum_desc.values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, enum_value.name))) + self._CheckConflictRegister(enum_value, full_name, file_name) + self._top_enum_values[full_name] = enum_value + self._AddFileDescriptor(enum_desc.file) + + # Adding a ServiceDescriptor to the descriptor pool is deprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddServiceDescriptor(self, service_desc): + self._AddServiceDescriptor(service_desc) + + # Never call this method. It is for internal usage only. + def _AddServiceDescriptor(self, service_desc): + """Adds a ServiceDescriptor to the pool. + + Args: + service_desc: A ServiceDescriptor. + """ + + if not isinstance(service_desc, descriptor.ServiceDescriptor): + raise TypeError('Expected instance of descriptor.ServiceDescriptor.') + + self._CheckConflictRegister(service_desc, service_desc.full_name, + service_desc.file.name) + self._service_descriptors[service_desc.full_name] = service_desc + + # Adding an ExtensionDescriptor to the descriptor pool is deprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddExtensionDescriptor(self, extension): + self._AddExtensionDescriptor(extension) + + # Never call this method. It is for internal usage only. + def _AddExtensionDescriptor(self, extension): + """Adds a FieldDescriptor describing an extension to the pool. + + Args: + extension: A FieldDescriptor. + + Raises: + AssertionError: when another extension with the same number extends the + same message. + TypeError: when the specified extension is not a + descriptor.FieldDescriptor. + """ + if not (isinstance(extension, descriptor.FieldDescriptor) and + extension.is_extension): + raise TypeError('Expected an extension descriptor.') + + if extension.extension_scope is None: + self._toplevel_extensions[extension.full_name] = extension + + try: + existing_desc = self._extensions_by_number[ + extension.containing_type][extension.number] + except KeyError: + pass + else: + if extension is not existing_desc: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" ' + 'with field number %d.'
% + (extension.full_name, existing_desc.full_name, + extension.containing_type.full_name, extension.number)) + + self._extensions_by_number[extension.containing_type][ + extension.number] = extension + self._extensions_by_name[extension.containing_type][ + extension.full_name] = extension + + # Also register MessageSet extensions with the type name. + if _IsMessageSetExtension(extension): + self._extensions_by_name[extension.containing_type][ + extension.message_type.full_name] = extension + + @_Deprecated + def AddFileDescriptor(self, file_desc): + self._InternalAddFileDescriptor(file_desc) + + # Never call this method. It is for internal usage only. + def _InternalAddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + self._AddFileDescriptor(file_desc) + # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. + # FieldDescriptor.file is added in code gen. Remove this solution after + # maybe 2020 for compatibility reason (with 3.4.1 only). + for extension in file_desc.extensions_by_name.values(): + self._file_desc_by_toplevel_extension[ + extension.full_name] = file_desc + + def _AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name (str): The path to the file to get a descriptor for. + + Returns: + FileDescriptor: The descriptor for the named file. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + pass + + try: + # Try fallback database. Build and find again if possible. + self._FindFileContainingSymbolInDb(symbol) + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + raise KeyError('Cannot find a file containing %s' % symbol) + + def _InternalFindFileContainingSymbol(self, symbol): + """Gets the already built FileDescriptor containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. 
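+
+    The lookup tries, in order: message types, enum types, services,
+    top-level enum values, top-level extensions and, finally, fields,
+    enum values and nested extensions inside a message.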
+ """ + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + return self._service_descriptors[symbol].file + except KeyError: + pass + + try: + return self._top_enum_values[symbol].type.file + except KeyError: + pass + + try: + return self._file_desc_by_toplevel_extension[symbol] + except KeyError: + pass + + # Try fields, enum values and nested extensions inside a message. + top_name, _, sub_name = symbol.rpartition('.') + try: + message = self.FindMessageTypeByName(top_name) + assert (sub_name in message.extensions_by_name or + sub_name in message.fields_by_name or + sub_name in message.enum_values_by_name) + return message.file + except (KeyError, AssertionError): + raise KeyError('Cannot find a file containing %s' % symbol) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name (str): The full name of the descriptor to load. + + Returns: + Descriptor: The descriptor for the named type. + + Raises: + KeyError: if the message cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name (str): The full name of the enum descriptor to load. + + Returns: + EnumDescriptor: The enum descriptor for the named type. + + Raises: + KeyError: if the enum cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name (str): The full name of the field descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named field. + + Raises: + KeyError: if the field cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindOneofByName(self, full_name): + """Loads the named oneof descriptor from the pool. + + Args: + full_name (str): The full name of the oneof descriptor to load. + + Returns: + OneofDescriptor: The oneof descriptor for the named oneof. + + Raises: + KeyError: if the oneof cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, oneof_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.oneofs_by_name[oneof_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. + + Args: + full_name (str): The full name of the extension descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named extension. + + Raises: + KeyError: if the extension cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + try: + # The proto compiler does not give any link between the FileDescriptor + # and top-level extensions unless the FileDescriptorProto is added to + # the DescriptorDatabase, but this can impact memory usage. 
+ # So we register these extensions by name explicitly. + return self._toplevel_extensions[full_name] + except KeyError: + pass + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self._FindFileContainingSymbolInDb(full_name) + return scope.extensions_by_name[extension_name] + + def FindExtensionByNumber(self, message_descriptor, number): + """Gets the extension of the specified message with the specified number. + + Extensions have to be registered to this pool by calling :func:`Add` or + :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): descriptor of the extended message. + number (int): Number of the extension field. + + Returns: + FieldDescriptor: The descriptor for the extension. + + Raises: + KeyError: when no extension with the given number is known for the + specified message. + """ + try: + return self._extensions_by_number[message_descriptor][number] + except KeyError: + self._TryLoadExtensionFromDB(message_descriptor, number) + return self._extensions_by_number[message_descriptor][number] + + def FindAllExtensions(self, message_descriptor): + """Gets all the known extensions of a given message. + + Extensions have to be registered to this pool by calling :func:`Add` + or :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): Descriptor of the extended message. + + Returns: + list[FieldDescriptor]: Field descriptors describing the extensions. + """ + # Fall back to the descriptor db if FindAllExtensionNumbers is provided. + if self._descriptor_db and hasattr( + self._descriptor_db, 'FindAllExtensionNumbers'): + full_name = message_descriptor.full_name + all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) + for number in all_numbers: + if number in self._extensions_by_number[message_descriptor]: + continue + self._TryLoadExtensionFromDB(message_descriptor, number) + + return list(self._extensions_by_number[message_descriptor].values()) + + def _TryLoadExtensionFromDB(self, message_descriptor, number): + """Try to load extensions from the descriptor db. + + Args: + message_descriptor: descriptor of the extended message. + number: the extension number that needs to be loaded. + """ + if not self._descriptor_db: + return + # Only supported when FindFileContainingExtension is provided. + if not hasattr( + self._descriptor_db, 'FindFileContainingExtension'): + return + + full_name = message_descriptor.full_name + file_proto = self._descriptor_db.FindFileContainingExtension( + full_name, number) + + if file_proto is None: + return + + try: + self._ConvertFileProtoToFileDescriptor(file_proto) + except Exception: + warn_msg = ('Unable to load proto file %s for extension number %d.' % + (file_proto.name, number)) + warnings.warn(warn_msg, RuntimeWarning) + + def FindServiceByName(self, full_name): + """Loads the named service descriptor from the pool. + + Args: + full_name (str): The full name of the service descriptor to load. + + Returns: + ServiceDescriptor: The service descriptor for the named service. + + Raises: + KeyError: if the service cannot be found in the pool.
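+
+    A minimal usage sketch (the service and method names here are purely
+    illustrative)::
+
+      service_desc = pool.FindServiceByName('some.package.MyService')
+      method_desc = service_desc.methods_by_name['MyMethod']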
+ """ + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._service_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._service_descriptors[full_name] + + def FindMethodByName(self, full_name): + """Loads the named service method descriptor from the pool. + + Args: + full_name (str): The full name of the method descriptor to load. + + Returns: + MethodDescriptor: The method descriptor for the service method. + + Raises: + KeyError: if the method cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + service_name, _, method_name = full_name.rpartition('.') + service_descriptor = self.FindServiceByName(service_name) + return service_descriptor.methods_by_name[method_name] + + def _FindFileContainingSymbolInDb(self, symbol): + """Finds the file in descriptor DB containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: The file that contains the specified symbol. + + Raises: + KeyError: if the file cannot be found in the descriptor database. + """ + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + public_deps = [direct_deps[i] for i in file_proto.public_dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=_OptionsOrNone(file_proto), + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps, + public_dependencies=public_deps, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. 
+ for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope, True)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, file_descriptor, + is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( + file_descriptor) + + for desc_proto in file_proto.message_type: + self._SetAllFieldTypes(file_proto.package, desc_proto, scope) + + if file_proto.package: + desc_proto_prefix = _PrefixWithDot(file_proto.package) + else: + desc_proto_prefix = '' + + for desc_proto in file_proto.message_type: + desc = self._GetTypeFromScope( + desc_proto_prefix, desc_proto.name, scope) + file_descriptor.message_types_by_name[desc_proto.name] = desc + + for index, service_proto in enumerate(file_proto.service): + file_descriptor.services_by_name[service_proto.name] = ( + self._MakeServiceDescriptor(service_proto, index, scope, + file_proto.package, file_descriptor)) + + self._file_descriptors[file_proto.name] = file_descriptor + + # Add extensions to the pool + file_desc = self._file_descriptors[file_proto.name] + for extension in file_desc.extensions_by_name.values(): + self._AddExtensionDescriptor(extension) + for message_type in file_desc.message_types_by_name.values(): + for extension in message_type.extensions: + self._AddExtensionDescriptor(extension) + + return file_desc + + def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, + scope=None, syntax=None): + """Adds the proto to the pool in the specified package. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: The package the proto should be located in. + file_desc: The file containing this message. + scope: Dict mapping short and full symbols to message and enum types. + syntax: string indicating syntax of the file ("proto2" or "proto3") + + Returns: + The added descriptor. 
+ """ + + if package: + desc_name = '.'.join((package, desc_proto.name)) + else: + desc_name = desc_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + if scope is None: + scope = {} + + nested = [ + self._ConvertMessageDescriptor( + nested, desc_name, file_desc, scope, syntax) + for nested in desc_proto.nested_type] + enums = [ + self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, + scope, False) + for enum in desc_proto.enum_type] + fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) + for index, field in enumerate(desc_proto.field)] + extensions = [ + self._MakeFieldDescriptor(extension, desc_name, index, file_desc, + is_extension=True) + for index, extension in enumerate(desc_proto.extension)] + oneofs = [ + # pylint: disable=g-complex-comprehension + descriptor.OneofDescriptor( + desc.name, + '.'.join((desc_name, desc.name)), + index, + None, + [], + _OptionsOrNone(desc), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for index, desc in enumerate(desc_proto.oneof_decl) + ] + extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] + if extension_ranges: + is_extendable = True + else: + is_extendable = False + desc = descriptor.Descriptor( + name=desc_proto.name, + full_name=desc_name, + filename=file_name, + containing_type=None, + fields=fields, + oneofs=oneofs, + nested_types=nested, + enum_types=enums, + extensions=extensions, + options=_OptionsOrNone(desc_proto), + is_extendable=is_extendable, + extension_ranges=extension_ranges, + file=file_desc, + serialized_start=None, + serialized_end=None, + syntax=syntax, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for nested in desc.nested_types: + nested.containing_type = desc + for enum in desc.enum_types: + enum.containing_type = desc + for field_index, field_desc in enumerate(desc_proto.field): + if field_desc.HasField('oneof_index'): + oneof_index = field_desc.oneof_index + oneofs[oneof_index].fields.append(fields[field_index]) + fields[field_index].containing_oneof = oneofs[oneof_index] + + scope[_PrefixWithDot(desc_name)] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._descriptors[desc_name] = desc + return desc + + def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, + containing_type=None, scope=None, top_level=False): + """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. + + Args: + enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the enum descriptor. + containing_type: The type containing this enum. + scope: Scope containing available types. + top_level: If True, the enum is a top level symbol. If False, the enum + is defined inside a message. 
+ + Returns: + The added descriptor + """ + + if package: + enum_name = '.'.join((package, enum_proto.name)) + else: + enum_name = enum_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + values = [self._MakeEnumValueDescriptor(value, index) + for index, value in enumerate(enum_proto.value)] + desc = descriptor.EnumDescriptor(name=enum_proto.name, + full_name=enum_name, + filename=file_name, + file=file_desc, + values=values, + containing_type=containing_type, + options=_OptionsOrNone(enum_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope['.%s' % enum_name] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._enum_descriptors[enum_name] = desc + + # Add top level enum values. + if top_level: + for value in values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, value.name))) + self._CheckConflictRegister(value, full_name, file_name) + self._top_enum_values[full_name] = value + + return desc + + def _MakeFieldDescriptor(self, field_proto, message_name, index, + file_desc, is_extension=False): + """Creates a field descriptor from a FieldDescriptorProto. + + For message and enum type fields, this method will do a look up + in the pool for the appropriate descriptor for that type. If it + is unavailable, it will fall back to the _source function to + create it. If this type is still unavailable, construction will + fail. + + Args: + field_proto: The proto describing the field. + message_name: The name of the containing message. + index: Index of the field + file_desc: The file containing the field descriptor. + is_extension: Indication that this field is for an extension. + + Returns: + An initialized FieldDescriptor object + """ + + if message_name: + full_name = '.'.join((message_name, field_proto.name)) + else: + full_name = field_proto.name + + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + + return descriptor.FieldDescriptor( + name=field_proto.name, + full_name=full_name, + index=index, + number=field_proto.number, + type=field_proto.type, + cpp_type=None, + message_type=None, + enum_type=None, + containing_type=None, + label=field_proto.label, + has_default_value=False, + default_value=None, + is_extension=is_extension, + extension_scope=None, + options=_OptionsOrNone(field_proto), + json_name=json_name, + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _SetAllFieldTypes(self, package, desc_proto, scope): + """Sets all the descriptor's fields's types. + + This method also sets the containing types on any extensions. + + Args: + package: The current package of desc_proto. + desc_proto: The message descriptor to update. + scope: Enclosing scope of available types. 
+ """ + + package = _PrefixWithDot(package) + + main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) + + if package == '.': + nested_package = _PrefixWithDot(desc_proto.name) + else: + nested_package = '.'.join([package, desc_proto.name]) + + for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): + self._SetFieldType(field_proto, field_desc, nested_package, scope) + + for extension_proto, extension_desc in ( + zip(desc_proto.extension, main_desc.extensions)): + extension_desc.containing_type = self._GetTypeFromScope( + nested_package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, nested_package, scope) + + for nested_type in desc_proto.nested_type: + self._SetAllFieldTypes(nested_package, nested_type, scope) + + def _SetFieldType(self, field_proto, field_desc, package, scope): + """Sets the field's type, cpp_type, message_type and enum_type. + + Args: + field_proto: Data about the field in proto format. + field_desc: The descriptor to modify. + package: The package the field's container is in. + scope: Enclosing scope of available types. + """ + if field_proto.type_name: + desc = self._GetTypeFromScope(package, field_proto.type_name, scope) + else: + desc = None + + if not field_proto.HasField('type'): + if isinstance(desc, descriptor.Descriptor): + field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE + else: + field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM + + field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( + field_proto.type) + + if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): + field_desc.message_type = desc + + if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.enum_type = desc + + if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: + field_desc.has_default_value = False + field_desc.default_value = [] + elif field_proto.HasField('default_value'): + field_desc.has_default_value = True + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = float(field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = field_proto.default_value + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = field_proto.default_value.lower() == 'true' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values_by_name[ + field_proto.default_value].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = text_encoding.CUnescape( + field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + else: + # All other types are of the "int" type. 
+ field_desc.default_value = int(field_proto.default_value) + else: + field_desc.has_default_value = False + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = 0.0 + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = u'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = False + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values[0].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = b'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: + field_desc.default_value = None + else: + # All other types are of the "int" type. + field_desc.default_value = 0 + + field_desc.type = field_proto.type + + def _MakeEnumValueDescriptor(self, value_proto, index): + """Creates an enum value descriptor object from an enum value proto. + + Args: + value_proto: The proto describing the enum value. + index: The index of the enum value. + + Returns: + An initialized EnumValueDescriptor object. + """ + + return descriptor.EnumValueDescriptor( + name=value_proto.name, + index=index, + number=value_proto.number, + options=_OptionsOrNone(value_proto), + type=None, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _MakeServiceDescriptor(self, service_proto, service_index, scope, + package, file_desc): + """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. + + Args: + service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. + service_index: The index of the service in the File. + scope: Dict mapping short and full symbols to message and enum types. + package: Optional package name for the new service descriptor. + file_desc: The file containing the service descriptor. + + Returns: + The added descriptor. + """ + + if package: + service_name = '.'.join((package, service_proto.name)) + else: + service_name = service_proto.name + + methods = [self._MakeMethodDescriptor(method_proto, service_name, package, + scope, index) + for index, method_proto in enumerate(service_proto.method)] + desc = descriptor.ServiceDescriptor( + name=service_proto.name, + full_name=service_name, + index=service_index, + methods=methods, + options=_OptionsOrNone(service_proto), + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._service_descriptors[service_name] = desc + return desc + + def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, + index): + """Creates a method descriptor from a MethodDescriptorProto. + + Args: + method_proto: The proto describing the method. + service_name: The name of the containing service. + package: Optional package name to look up for types. + scope: Scope containing available types. + index: Index of the method in the service. + + Returns: + An initialized MethodDescriptor object.
+ """ + full_name = '.'.join((service_name, method_proto.name)) + input_type = self._GetTypeFromScope( + package, method_proto.input_type, scope) + output_type = self._GetTypeFromScope( + package, method_proto.output_type, scope) + return descriptor.MethodDescriptor( + name=method_proto.name, + full_name=full_name, + index=index, + containing_service=None, + input_type=input_type, + output_type=output_type, + client_streaming=method_proto.client_streaming, + server_streaming=method_proto.server_streaming, + options=_OptionsOrNone(method_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _ExtractSymbols(self, descriptors): + """Pulls out all the symbols from descriptor protos. + + Args: + descriptors: The messages to extract descriptors from. + Yields: + A two element tuple of the type name and descriptor object. + """ + + for desc in descriptors: + yield (_PrefixWithDot(desc.full_name), desc) + for symbol in self._ExtractSymbols(desc.nested_types): + yield symbol + for enum in desc.enum_types: + yield (_PrefixWithDot(enum.full_name), enum) + + def _GetDeps(self, dependencies, visited=None): + """Recursively finds dependencies for file protos. + + Args: + dependencies: The names of the files being depended on. + visited: The names of files already found. + + Yields: + Each direct and indirect dependency. + """ + + visited = visited or set() + for dependency in dependencies: + if dependency not in visited: + visited.add(dependency) + dep_desc = self.FindFileByName(dependency) + yield dep_desc + public_files = [d.name for d in dep_desc.public_dependencies] + yield from self._GetDeps(public_files, visited) + + def _GetTypeFromScope(self, package, type_name, scope): + """Finds a given type name in the current scope. + + Args: + package: The package the proto should be located in. + type_name: The name of the type to be found in the scope. + scope: Dict mapping short and full symbols to message and enum types. + + Returns: + The descriptor for the requested type. + """ + if type_name not in scope: + components = _PrefixWithDot(package).split('.') + while components: + possible_match = '.'.join(components + [type_name]) + if possible_match in scope: + type_name = possible_match + break + else: + components.pop(-1) + return scope[type_name] + + +def _PrefixWithDot(name): + return name if name.startswith('.') else '.%s' % name + + +if _USE_C_DESCRIPTORS: + # TODO(amauryfa): This pool could be constructed from Python code, when we + # support a flag like 'use_cpp_generated_pool=True'. + # pylint: disable=protected-access + _DEFAULT = descriptor._message.default_pool +else: + _DEFAULT = DescriptorPool() + + +def Default(): + return _DEFAULT diff --git a/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py new file mode 100644 index 0000000000..a8ecc07bdf --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/duration.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DURATION._serialized_start=51 + _DURATION._serialized_end=93 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py new file mode 100644 index 0000000000..0b4d554db3 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _EMPTY._serialized_start=48 + _EMPTY._serialized_end=55 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py new file mode 100644 index 0000000000..80a4e96e59 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/field_mask.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _FIELDMASK._serialized_start=53 + _FIELDMASK._serialized_end=79 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py new file mode 100644 index 0000000000..afdbb78c36 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Adds support for parameterized tests to Python's unittest TestCase class. + +A parameterized test is a method in a test case that is invoked with different +argument tuples. + +A simple example: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + (1, 2, 3), + (4, 5, 9), + (1, 1, 3)) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Each invocation is a separate test case and properly isolated just +like a normal test method, with its own setUp/tearDown cycle. In the +example above, there are three separate testcases, one of which will +fail due to an assertion error (1 + 1 != 3). + +Parameters for individual test cases can be tuples (with positional parameters) +or dictionaries (with named parameters): + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + {'op1': 1, 'op2': 2, 'result': 3}, + {'op1': 4, 'op2': 5, 'result': 9}, + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + +If a parameterized test fails, the error message will show the +original test name (which is modified internally) and the arguments +for the specific invocation, which are part of the string returned by +the shortDescription() method on test cases. + +The id method of the test, used internally by the unittest framework, +is also modified to show the arguments. To make sure that test names +stay the same across several invocations, object representations like + + >>> class Foo(object): + ... pass + >>> repr(Foo()) + '<__main__.Foo object at 0x23d8610>' + +are turned into '<__main__.Foo>'. For even more descriptive names, +especially in test logs, you can use the named_parameters decorator. In +this case, only tuples are supported, and the first parameters has to +be a string (or an object that returns an apt name when converted via +str()): + + class NamedExample(parameterized.TestCase): + @parameterized.named_parameters( + ('Normal', 'aa', 'aaa', True), + ('EmptyPrefix', '', 'abc', True), + ('BothEmpty', '', '', True)) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, strings.startswith(prefix)) + +Named tests also have the benefit that they can be run individually +from the command line: + + $ testmodule.py NamedExample.testStartsWithNormal + . + -------------------------------------------------------------------- + Ran 1 test in 0.000s + + OK + +Parameterized Classes +===================== +If invocation arguments are shared across test methods in a single +TestCase class, instead of decorating all test methods +individually, the class itself can be decorated: + + @parameterized.parameters( + (1, 2, 3) + (4, 5, 9)) + class ArithmeticTest(parameterized.TestCase): + def testAdd(self, arg1, arg2, result): + self.assertEqual(arg1 + arg2, result) + + def testSubtract(self, arg2, arg2, result): + self.assertEqual(result - arg1, arg2) + +Inputs from Iterables +===================== +If parameters should be shared across several test cases, or are dynamically +created from other sources, a single non-tuple iterable can be passed into +the decorator. 
This iterable will be used to obtain the test cases: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + (c.op1, c.op2, c.result) for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Single-Argument Test Methods +============================ +If a test method takes only one argument, the single argument does not need to +be wrapped into a tuple: + + class NegativeNumberExample(parameterized.TestCase): + @parameterized.parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) +""" + +__author__ = 'tmarek@google.com (Torsten Marek)' + +import functools +import re +import types +import unittest +import uuid + +try: + # Since python 3 + import collections.abc as collections_abc +except ImportError: + # Won't work after python 3.8 + import collections as collections_abc + +ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') +_SEPARATOR = uuid.uuid1().hex +_FIRST_ARG = object() +_ARGUMENT_REPR = object() + + +def _CleanRepr(obj): + return ADDR_RE.sub(r'<\1>', repr(obj)) + + +# Helper function formerly from the unittest module, removed from it in +# Python 2.7. +def _StrClass(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + + +def _NonStringIterable(obj): + return (isinstance(obj, collections_abc.Iterable) and + not isinstance(obj, str)) + + +def _FormatParameterList(testcase_params): + if isinstance(testcase_params, collections_abc.Mapping): + return ', '.join('%s=%s' % (argname, _CleanRepr(value)) + for argname, value in testcase_params.items()) + elif _NonStringIterable(testcase_params): + return ', '.join(map(_CleanRepr, testcase_params)) + else: + return _FormatParameterList((testcase_params,)) + + +class _ParameterizedTestIter(object): + """Callable and iterable class for producing new test cases.""" + + def __init__(self, test_method, testcases, naming_type): + """Returns concrete test functions for a test and a list of parameters. + + The naming_type is used to determine the name of the concrete + functions as reported by the unittest framework. If naming_type is + _FIRST_ARG, the testcases must be tuples, and the first element must + have a string representation that is a valid Python identifier. + + Args: + test_method: The decorated test method. + testcases: (list of tuple/dict) A list of parameter + tuples/dicts for individual test invocations. + naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR. + """ + self._test_method = test_method + self.testcases = testcases + self._naming_type = naming_type + + def __call__(self, *args, **kwargs): + raise RuntimeError('You appear to be running a parameterized test case ' + 'without having inherited from parameterized.' + 'TestCase. This is bad because none of ' + 'your test cases are actually being run.') + + def __iter__(self): + test_method = self._test_method + naming_type = self._naming_type + + def MakeBoundParamTest(testcase_params): + @functools.wraps(test_method) + def BoundParamTest(self): + if isinstance(testcase_params, collections_abc.Mapping): + test_method(self, **testcase_params) + elif _NonStringIterable(testcase_params): + test_method(self, *testcase_params) + else: + test_method(self, testcase_params) + + if naming_type is _FIRST_ARG: + # Signal the metaclass that the name of the test function is unique + # and descriptive.
+ BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. + """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections_abc.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def named_parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. 
+ + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjunction with the + parameters decorator. + """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): + """Base class for test cases using the parameters decorator.""" + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. + + This enables the TestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopTestCase', + (other_base_class, TestCase), {}) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000..7fef237670 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py @@ -0,0 +1,112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys +import warnings + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + + +_default_implementation_type = ('cpp' if _api_version > 0 else 'python') + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. 
That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +def _SetType(implementation_type): + """Never use! Only for protobuf benchmark.""" + global _implementation_type + _implementation_type = implementation_type + + +# See comment on 'Type' above. +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py new file mode 100644 index 0000000000..64353ee4af --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. 
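+
+For example, a generated foo_pb2.py module (name illustrative) typically
+invokes these helpers as:
+
+  from google.protobuf.internal import builder as _builder
+  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+  _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'foo_pb2', globals())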
+""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. + + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. + for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. 
+ + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service as _service + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (_service.Service,), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py new file mode 100644 index 0000000000..29fbb53d2f --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py @@ -0,0 +1,710 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + +- Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). +- Repeated composite fields - Repeated fields which are composite. This + includes groups and nested messages. 
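+
+For illustration, given a hypothetical message `msg` with a repeated scalar
+field `values` and a repeated message field `items`, these containers behave
+like type-checked lists:
+
+  msg.values.append(42)
+  msg.values.extend([1, 2, 3])
+  item = msg.items.add()      # new submessage, appended and returned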
+""" + +import collections.abc +import copy +import pickle +from typing import ( + Any, + Iterable, + Iterator, + List, + MutableMapping, + MutableSequence, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + overload, +) + + +_T = TypeVar('_T') +_K = TypeVar('_K') +_V = TypeVar('_V') + + +class BaseContainer(Sequence[_T]): + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener: Any) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + @overload + def __getitem__(self, key: int) -> _T: + ... + + @overload + def __getitem__(self, key: slice) -> List[_T]: + ... + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self) -> int: + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other: Any) -> bool: + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + __hash__ = None + + def __repr__(self) -> str: + return repr(self._values) + + def sort(self, *args, **kwargs) -> None: + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + def reverse(self) -> None: + self._values.reverse() + + +# TODO(slebedev): Remove this. BaseContainer does *not* conform to +# MutableSequence, only its subclasses do. +collections.abc.MutableSequence.register(BaseContainer) + + +class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__( + self, + message_listener: Any, + type_checker: Any, + ) -> None: + """Args: + + message_listener: A MessageListener implementation. The + RepeatedScalarFieldContainer will call this object's Modified() method + when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super().__init__(message_listener) + self._type_checker = type_checker + + def append(self, value: _T) -> None: + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given iterable. Similar to list.extend().""" + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. 
b/18413862 + return + raise + + new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] + if new_values: + self._values.extend(new_values) + self._message_listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one. We do not check the types of the individual fields. + """ + self._values.extend(other) + self._message_listener.Modified() + + def remove(self, elem: _T): + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value) -> None: + """Sets the item on the specified position.""" + if isinstance(key, slice): + if key.step is not None: + raise ValueError('Extended slices not supported') + self._values[key] = map(self._type_checker.CheckValue, value) + self._message_listener.Modified() + else: + self._values[key] = self._type_checker.CheckValue(value) + self._message_listener.Modified() + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + # Special case for the same type which should be common and fast. + if isinstance(other, self.__class__): + return other._values == self._values + # We are presumably comparing against some other sequence type. + return other == self._values + + def __deepcopy__( + self, + unused_memo: Any = None, + ) -> 'RepeatedScalarFieldContainer[_T]': + clone = RepeatedScalarFieldContainer( + copy.deepcopy(self._message_listener), self._type_checker) + clone.MergeFrom(self) + return clone + + def __reduce__(self, **kwargs) -> NoReturn: + raise pickle.PickleError( + "Can't pickle repeated scalar fields, convert to list first") + + +# TODO(slebedev): Constrain T to be a subtype of Message. +class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, list-like container for holding repeated composite fields.""" + + # Disallows assignment to other attributes. + __slots__ = ['_message_descriptor'] + + def __init__(self, message_listener: Any, message_descriptor: Any) -> None: + """ + Note that we pass in a descriptor instead of the generated directly, + since at the time we construct a _RepeatedCompositeFieldContainer we + haven't yet necessarily initialized the type that will be contained in the + container. + + Args: + message_listener: A MessageListener implementation. + The RepeatedCompositeFieldContainer will call this object's + Modified() method when it is modified. + message_descriptor: A Descriptor instance describing the protocol type + that should be present in this container. We'll use the + _concrete_class field of this descriptor when the client calls add(). + """ + super().__init__(message_listener) + self._message_descriptor = message_descriptor + + def add(self, **kwargs: Any) -> _T: + """Adds a new element at the end of the list and returns it. 
Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def append(self, value: _T) -> None: + """Appends one element by copying the message.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position by copying.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.insert(key, new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given sequence of elements of the same type + + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other) + + def remove(self, elem: _T) -> None: + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value): + # This method is implemented to make RepeatedCompositeFieldContainer + # structurally compatible with typing.MutableSequence. It is + # otherwise unsupported and will always raise an error. + raise TypeError( + f'{self.__class__.__name__} object does not support item assignment') + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. 
+  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
+               '_entry_descriptor']
+
+  def __init__(
+      self,
+      message_listener: Any,
+      key_checker: Any,
+      value_checker: Any,
+      entry_descriptor: Any,
+  ) -> None:
+    """
+    Args:
+      message_listener: A MessageListener implementation.
+        The ScalarMap will call this object's Modified() method when it
+        is modified.
+      key_checker: A type_checkers.ValueChecker instance to run on keys
+        inserted into this container.
+      value_checker: A type_checkers.ValueChecker instance to run on values
+        inserted into this container.
+      entry_descriptor: The MessageDescriptor of a map entry: key and value.
+    """
+    self._message_listener = message_listener
+    self._key_checker = key_checker
+    self._value_checker = value_checker
+    self._entry_descriptor = entry_descriptor
+    self._values = {}
+
+  def __getitem__(self, key: _K) -> _V:
+    try:
+      return self._values[key]
+    except KeyError:
+      key = self._key_checker.CheckValue(key)
+      val = self._value_checker.DefaultValue()
+      self._values[key] = val
+      return val
+
+  def __contains__(self, item: _K) -> bool:
+    # We check the key's type to match the strong-typing flavor of the API.
+    # Also this makes it easier to match the behavior of the C++ implementation.
+    self._key_checker.CheckValue(item)
+    return item in self._values
+
+  @overload
+  def get(self, key: _K) -> Optional[_V]:
+    ...
+
+  @overload
+  def get(self, key: _K, default: _T) -> Union[_V, _T]:
+    ...
+
+  # We need to override this explicitly, because our defaultdict-like behavior
+  # will make the default implementation (from our base class) always insert
+  # the key.
+  def get(self, key, default=None):
+    if key in self:
+      return self[key]
+    else:
+      return default
+
+  def __setitem__(self, key: _K, value: _V) -> None:
+    checked_key = self._key_checker.CheckValue(key)
+    checked_value = self._value_checker.CheckValue(value)
+    self._values[checked_key] = checked_value
+    self._message_listener.Modified()
+
+  def __delitem__(self, key: _K) -> None:
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __len__(self) -> int:
+    return len(self._values)
+
+  def __iter__(self) -> Iterator[_K]:
+    return iter(self._values)
+
+  def __repr__(self) -> str:
+    return repr(self._values)
+
+  def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
+    self._values.update(other._values)
+    self._message_listener.Modified()
+
+  def InvalidateIterators(self) -> None:
+    # It appears that the only way to reliably invalidate iterators to
+    # self._values is to ensure that its size changes.
+    original = self._values
+    self._values = original.copy()
+    original[None] = None
+
+  # This is defined in the abstract base, but we can do it much more cheaply.
+  def clear(self) -> None:
+    self._values.clear()
+    self._message_listener.Modified()
+
+  def GetEntryClass(self) -> Any:
+    return self._entry_descriptor._concrete_class
+
+
+class MessageMap(MutableMapping[_K, _V]):
+  """Simple, type-checked, dict-like container with submessage values."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_key_checker', '_values', '_message_listener',
+               '_message_descriptor', '_entry_descriptor']
+
+  def __init__(
+      self,
+      message_listener: Any,
+      message_descriptor: Any,
+      key_checker: Any,
+      entry_descriptor: Any,
+  ) -> None:
+    """
+    Args:
+      message_listener: A MessageListener implementation.
+        The MessageMap will call this object's Modified() method when it
+        is modified.
+      message_descriptor: A Descriptor instance describing the protocol type
+        of this map's values; its _concrete_class is instantiated for keys
+        that are missing from the map.
+      key_checker: A type_checkers.ValueChecker instance to run on keys
+        inserted into this container.
+      entry_descriptor: The MessageDescriptor of a map entry: key and value.
+    """
+    self._message_listener = message_listener
+    self._message_descriptor = message_descriptor
+    self._key_checker = key_checker
+    self._entry_descriptor = entry_descriptor
+    self._values = {}
+
+  def __getitem__(self, key: _K) -> _V:
+    key = self._key_checker.CheckValue(key)
+    try:
+      return self._values[key]
+    except KeyError:
+      new_element = self._message_descriptor._concrete_class()
+      new_element._SetListener(self._message_listener)
+      self._values[key] = new_element
+      self._message_listener.Modified()
+      return new_element
+
+  def get_or_create(self, key: _K) -> _V:
+    """get_or_create() is an alias for getitem (ie. map[key]).
+
+    Args:
+      key: The key to get or create in the map.
+
+    This is useful in cases where you want to be explicit that the call is
+    mutating the map.  This can avoid lint errors for statements like this
+    that otherwise would appear to be pointless statements:
+
+      msg.my_map[key]
+    """
+    return self[key]
+
+  @overload
+  def get(self, key: _K) -> Optional[_V]:
+    ...
+
+  @overload
+  def get(self, key: _K, default: _T) -> Union[_V, _T]:
+    ...
+
+  # We need to override this explicitly, because our defaultdict-like behavior
+  # will make the default implementation (from our base class) always insert
+  # the key.
+  def get(self, key, default=None):
+    if key in self:
+      return self[key]
+    else:
+      return default
+
+  def __contains__(self, item: _K) -> bool:
+    item = self._key_checker.CheckValue(item)
+    return item in self._values
+
+  def __setitem__(self, key: _K, value: _V) -> NoReturn:
+    raise ValueError('May not set values directly, call my_map[key].foo = 5')
+
+  def __delitem__(self, key: _K) -> None:
+    key = self._key_checker.CheckValue(key)
+    del self._values[key]
+    self._message_listener.Modified()
+
+  def __len__(self) -> int:
+    return len(self._values)
+
+  def __iter__(self) -> Iterator[_K]:
+    return iter(self._values)
+
+  def __repr__(self) -> str:
+    return repr(self._values)
+
+  def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
+    # pylint: disable=protected-access
+    for key in other._values:
+      # According to documentation: "When parsing from the wire or when merging,
+      # if there are duplicate map keys the last key seen is used".
+      if key in self:
+        del self[key]
+      self[key].CopyFrom(other[key])
+    # self._message_listener.Modified() not required here, because
+    # mutations to submessages already propagate.
+
+  def InvalidateIterators(self) -> None:
+    # It appears that the only way to reliably invalidate iterators to
+    # self._values is to ensure that its size changes.
+    original = self._values
+    self._values = original.copy()
+    original[None] = None
+
+  # This is defined in the abstract base, but we can do it much more cheaply.
+  def clear(self) -> None:
+    self._values.clear()
+    self._message_listener.Modified()
+
+  def GetEntryClass(self) -> Any:
+    return self._entry_descriptor._concrete_class
+
+
+class _UnknownField:
+  """A parsed unknown field."""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_field_number', '_wire_type', '_data']
+
+  def __init__(self, field_number, wire_type, data):
+    self._field_number = field_number
+    self._wire_type = wire_type
+    self._data = data
+    return
+
+  def __lt__(self, other):
+    # pylint: disable=protected-access
+    return self._field_number < other._field_number
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    # pylint: disable=protected-access
+    return (self._field_number == other._field_number and
+            self._wire_type == other._wire_type and
+            self._data == other._data)
+
+
+class UnknownFieldRef:  # pylint: disable=missing-class-docstring
+
+  def __init__(self, parent, index):
+    self._parent = parent
+    self._index = index
+
+  def _check_valid(self):
+    if not self._parent:
+      raise ValueError('UnknownField does not exist. '
+                       'The parent message might be cleared.')
+    if self._index >= len(self._parent):
+      raise ValueError('UnknownField does not exist. '
+                       'The parent message might be cleared.')
+
+  @property
+  def field_number(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._field_number
+
+  @property
+  def wire_type(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._wire_type
+
+  @property
+  def data(self):
+    self._check_valid()
+    # pylint: disable=protected-access
+    return self._parent._internal_get(self._index)._data
+
+
+class UnknownFieldSet:
+  """UnknownField container"""
+
+  # Disallows assignment to other attributes.
+  __slots__ = ['_values']
+
+  def __init__(self):
+    self._values = []
+
+  def __getitem__(self, index):
+    if self._values is None:
+      raise ValueError('UnknownFields does not exist. '
+                       'The parent message might be cleared.')
+    size = len(self._values)
+    if index < 0:
+      index += size
+    if index < 0 or index >= size:
+      raise IndexError('index %d out of range' % index)
+
+    return UnknownFieldRef(self, index)
+
+  def _internal_get(self, index):
+    return self._values[index]
+
+  def __len__(self):
+    if self._values is None:
+      raise ValueError('UnknownFields does not exist. '
+                       'The parent message might be cleared.')
+    return len(self._values)
+
+  def _add(self, field_number, wire_type, data):
+    unknown_field = _UnknownField(field_number, wire_type, data)
+    self._values.append(unknown_field)
+    return unknown_field
+
+  def __iter__(self):
+    for i in range(len(self)):
+      yield UnknownFieldRef(self, i)
+
+  def _extend(self, other):
+    if other is None:
+      return
+    # pylint: disable=protected-access
+    self._values.extend(other._values)
+
+  def __eq__(self, other):
+    if self is other:
+      return True
+    # Sort unknown fields because their order shouldn't
+    # affect equality test.
+    values = list(self._values)
+    if other is None:
+      return not values
+    values.sort()
+    # pylint: disable=protected-access
+    other_values = sorted(other._values)
+    return values == other_values
+
+  def _clear(self):
+    for value in self._values:
+      # pylint: disable=protected-access
+      if isinstance(value._data, UnknownFieldSet):
+        value._data._clear()  # pylint: disable=protected-access
+    self._values = None
diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py
new file mode 100644
index 0000000000..bc1b7b785c
--- /dev/null
+++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py
@@ -0,0 +1,1029 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+  Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+  buffer: The string containing the encoded message.
+  pos: The current position in the string.
+  end: The position in the string where the current message ends.  May be
+    less than len(buffer) if we're reading a sub-message.
+  message: The message object into which we're parsing.
+  field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position.  A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+  IndexError: Indicates a truncated message.
+  struct.error: Unpacking of a fixed-width field failed.
+  message.DecodeError: Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking: it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+  field_number: The field number of the field we want to decode.
+  is_repeated: Is the field a repeated field? (bool)
+  is_packed: Is the field a packed field? (bool)
+  key: The key to use when looking up the field within field_dict.
+    (This is actually the FieldDescriptor but nothing in this
+    file should depend on that.)
+  new_default: A function which takes a message object as a parameter and
+    returns a new instance of the default value for this field.
+ (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. + """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. 
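+
+  For example, the concrete UInt64Decoder defined further down in this module
+  is built as _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint).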
+ + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. 
+
+    Args:
+      buffer: memoryview of the serialized bytes
+      pos: int, position in the memory view to start at.
+
+    Returns:
+      Tuple[float, int] of the deserialized float value and new position
+      in the serialized data.
+    """
+    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
+    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
+    new_pos = pos + 4
+    float_bytes = buffer[pos:new_pos].tobytes()
+
+    # If this value has all its exponent bits set, then it's non-finite.
+    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
+    # To avoid that, we parse it specially.
+    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
+      # If at least one significand bit is set...
+      if float_bytes[0:3] != b'\x00\x00\x80':
+        return (math.nan, new_pos)
+      # If sign bit is set...
+      if float_bytes[3:4] == b'\xFF':
+        return (-math.inf, new_pos)
+      return (math.inf, new_pos)
+
+    # Note that we expect someone up-stack to catch struct.error and convert
+    # it to _DecodeError -- this way we don't have to set up exception-
+    # handling blocks every time we parse one value.
+    result = local_unpack('<f', float_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
+
+
+def _DoubleDecoder():
+  """Returns a decoder for a double field.
+
+  This code works around a bug in struct.unpack for not-a-number.
+  """
+
+  local_unpack = struct.unpack
+
+  def InnerDecode(buffer, pos):
+    """Decode serialized double to a double and new position.
+
+    Args:
+      buffer: memoryview of the serialized bytes.
+      pos: int, position in the memory view to start at.
+
+    Returns:
+      Tuple[float, int] of the decoded double value and new position
+      in the serialized data.
+    """
+    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
+    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
+    new_pos = pos + 8
+    double_bytes = buffer[pos:new_pos].tobytes()
+
+    # If this value has all its exponent bits set and at least one significand
+    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
+    # as inf or -inf.  To avoid that, we treat it specially.
+    if ((double_bytes[7:8] in b'\x7F\xFF')
+        and (double_bytes[6:7] >= b'\xF0')
+        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
+      return (math.nan, new_pos)
+
+    # Note that we expect someone up-stack to catch struct.error and convert
+    # it to _DecodeError -- this way we don't have to set up exception-
+    # handling blocks every time we parse one value.
+    result = local_unpack('<d', double_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
+
+
+def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
+                clear_if_default=False):
+  """Returns a decoder for enum field."""
+  enum_type = key.enum_type
+  if is_packed:
+    local_DecodeVarint = _DecodeVarint
+    def DecodePackedField(buffer, pos, end, message, field_dict):
+      """Decode serialized packed enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+      """
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      (endpoint, pos) = local_DecodeVarint(buffer, pos)
+      endpoint += pos
+      if endpoint > end:
+        raise _DecodeError('Truncated message.')
+      while pos < endpoint:
+        value_start_pos = pos
+        (element, pos) = _DecodeSignedVarint32(buffer, pos)
+        # pylint: disable=protected-access
+        if element in enum_type.values_by_number:
+          value.append(element)
+        else:
+          if not message._unknown_fields:
+            message._unknown_fields = []
+          tag_bytes = encoder.TagBytes(field_number,
+                                       wire_format.WIRETYPE_VARINT)
+          message._unknown_fields.append(
+              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+          if message._unknown_field_set is None:
+            message._unknown_field_set = containers.UnknownFieldSet()
+          message._unknown_field_set._add(
+              field_number, wire_format.WIRETYPE_VARINT, element)
+        # pylint: enable=protected-access
+        if pos > endpoint:
+          if element in enum_type.values_by_number:
+            del value[-1]   # Discard corrupt value.
+          else:
+            del message._unknown_fields[-1]
+            # pylint: disable=protected-access
+            del message._unknown_field_set._values[-1]
+            # pylint: enable=protected-access
+          raise _DecodeError('Packed element was truncated.')
+      return pos
+    return DecodePackedField
+  elif is_repeated:
+    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      """Decode serialized repeated enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+ """ + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (tag_bytes, buffer[pos:new_pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value_start_pos = pos + (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not enum_value: + field_dict.pop(key, None) + return pos + # pylint: disable=protected-access + if enum_value in enum_type.values_by_number: + field_dict[key] = enum_value + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, enum_value) + # pylint: enable=protected-access + return pos + return DecodeField + + +# -------------------------------------------------------------------- + + +Int32Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) + +Int64Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) + +UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) +UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) + +SInt32Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) +SInt64Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: + raise _DecodeError('Truncated string.') + value.append(_ConvertToUnicode(buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. 
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
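+      # (Illustrative encoding: for type_id 12345 with an empty payload the
+      # item is 0x0B 0x10 0xB9 0x60 0x1A 0x00 0x0C -- start-group tag,
+      # type_id varint, empty message field, end-group tag.)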
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + message._FACTORY.GetPrototype(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + type_id, + wire_format.WIRETYPE_LENGTH_DELIMITED, + buffer[message_start:message_end].tobytes()) + # pylint: enable=protected-access + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. + message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. 
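+  A varint ends at the first byte whose high bit is clear; e.g. the two
+  bytes 0x96 0x01 encode the value 150.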
Returns the new position.""" + # Previously ord(buffer[pos]) raised IndexError when pos is out of range. + # With this code, ord(b'') raises TypeError. Both are handled in + # python_message.py to generate a 'Truncated message' error. + while ord(buffer[pos:pos+1].tobytes()) & 0x80: + pos += 1 + pos += 1 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipFixed64(buffer, pos, end): + """Skip a fixed64 value. Returns the new position.""" + + pos += 8 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed64(buffer, pos): + """Decode a fixed64.""" + new_pos = pos + 8 + return (struct.unpack(' end: + raise _DecodeError('Truncated message.') + return pos + + +def _SkipGroup(buffer, pos, end): + """Skip sub-group. Returns the new position.""" + + while 1: + (tag_bytes, pos) = ReadTag(buffer, pos) + new_pos = SkipField(buffer, pos, end, tag_bytes) + if new_pos == -1: + return pos + pos = new_pos + + +def _DecodeUnknownFieldSet(buffer, pos, end_pos=None): + """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position.""" + + unknown_field_set = containers.UnknownFieldSet() + while end_pos is None or pos < end_pos: + (tag_bytes, pos) = ReadTag(buffer, pos) + (tag, _) = _DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if wire_type == wire_format.WIRETYPE_END_GROUP: + break + (data, pos) = _DecodeUnknownField(buffer, pos, wire_type) + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + + return (unknown_field_set, pos) + + +def _DecodeUnknownField(buffer, pos, wire_type): + """Decode a unknown field. Returns the UnknownField and new position.""" + + if wire_type == wire_format.WIRETYPE_VARINT: + (data, pos) = _DecodeVarint(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED64: + (data, pos) = _DecodeFixed64(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED32: + (data, pos) = _DecodeFixed32(buffer, pos) + elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED: + (size, pos) = _DecodeVarint(buffer, pos) + data = buffer[pos:pos+size].tobytes() + pos += size + elif wire_type == wire_format.WIRETYPE_START_GROUP: + (data, pos) = _DecodeUnknownFieldSet(buffer, pos) + elif wire_type == wire_format.WIRETYPE_END_GROUP: + return (0, -1) + else: + raise _DecodeError('Wrong wire type in tag.') + + return (data, pos) + + +def _EndGroup(buffer, pos, end): + """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" + + return -1 + + +def _SkipFixed32(buffer, pos, end): + """Skip a fixed32 value. 
Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack('B').pack + + def EncodeVarint(write, value, unused_deterministic=None): + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeVarint + + +def _SignedVarintEncoder(): + """Return an encoder for a basic signed varint value (does not include + tag).""" + + local_int2byte = struct.Struct('>B').pack + + def EncodeSignedVarint(write, value, unused_deterministic=None): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeSignedVarint + + +_EncodeVarint = _VarintEncoder() +_EncodeSignedVarint = _SignedVarintEncoder() + + +def _VarintBytes(value): + """Encode the given integer as a varint and return the bytes. This is only + called at startup time so it doesn't need to be fast.""" + + pieces = [] + _EncodeVarint(pieces.append, value, True) + return b"".join(pieces) + + +def TagBytes(field_number, wire_type): + """Encode the given tag and return the bytes. Only called at startup.""" + + return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) + +# -------------------------------------------------------------------- +# As with sizers (see above), we have a number of common encoder +# implementations. + + +def _SimpleEncoder(wire_type, encode_value, compute_value_size): + """Return a constructor for an encoder for fields of a particular type. + + Args: + wire_type: The field's wire type, for encoding tags. + encode_value: A function which encodes an individual value, e.g. + _EncodeVarint(). + compute_value_size: A function which computes the size of an individual + value, e.g. _VarintSize(). + """ + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(element) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, element, deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, element, deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, value, deterministic) + return EncodeField + + return SpecificEncoder + + +def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): + """Like SimpleEncoder but additionally invokes modify_value on every value + before passing it to encode_value. 
Usually modify_value is ZigZagEncode.""" + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(modify_value(element)) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, modify_value(element), deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, modify_value(element), deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, modify_value(value), deterministic) + return EncodeField + + return SpecificEncoder + + +def _StructPackEncoder(wire_type, format): + """Return a constructor for an encoder for a fixed-width field. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + write(local_struct_pack(format, element)) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + write(local_struct_pack(format, element)) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + return write(local_struct_pack(format, value)) + return EncodeField + + return SpecificEncoder + + +def _FloatingPointEncoder(wire_type, format): + """Return a constructor for an encoder for float fields. + + This is like StructPackEncoder, but catches errors that may be due to + passing non-finite floating-point values to struct.pack, and makes a + second attempt to encode those values. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
+ if value == _POS_INF:
+ write(b'\x00\x00\x80\x7F')
+ elif value == _NEG_INF:
+ write(b'\x00\x00\x80\xFF')
+ elif value != value: # NaN
+ write(b'\x00\x00\xC0\x7F')
+ else:
+ raise
+ elif value_size == 8:
+ def EncodeNonFiniteOrRaise(write, value):
+ if value == _POS_INF:
+ write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
+ elif value == _NEG_INF:
+ write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
+ elif value != value: # NaN
+ write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
+ else:
+ raise
+ else:
+ raise ValueError('Can\'t encode floating-point values that are '
+ '%d bytes long (only 4 or 8)' % value_size)
+
+ def SpecificEncoder(field_number, is_repeated, is_packed):
+ local_struct_pack = struct.pack
+ if is_packed:
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ local_EncodeVarint = _EncodeVarint
+ def EncodePackedField(write, value, deterministic):
+ write(tag_bytes)
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
+ for element in value:
+ # This try/except block is going to be faster than any code that
+ # we could write to check whether element is finite.
+ try:
+ write(local_struct_pack(format, element))
+ except SystemError:
+ EncodeNonFiniteOrRaise(write, element)
+ return EncodePackedField
+ elif is_repeated:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
+ for element in value:
+ write(tag_bytes)
+ try:
+ write(local_struct_pack(format, element))
+ except SystemError:
+ EncodeNonFiniteOrRaise(write, element)
+ return EncodeRepeatedField
+ else:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeField(write, value, unused_deterministic=None):
+ write(tag_bytes)
+ try:
+ write(local_struct_pack(format, value))
+ except SystemError:
+ EncodeNonFiniteOrRaise(write, value)
+ return EncodeField
+
+ return SpecificEncoder
+
+
+# ====================================================================
+# Here we declare an encoder constructor for each field type. These work
+# very similarly to sizer constructors, described earlier.
+
+
+Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
+ wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
+
+UInt32Encoder = UInt64Encoder = _SimpleEncoder(
+ wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
+
+SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
+ wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
+ wire_format.ZigZagEncode)
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
+Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
+SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
+SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
+FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
+DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
+
+
+class EnumTypeWrapper(object):
+ """A utility for finding the names of enum values."""
+
+ DESCRIPTOR = None
+
+ # This is a type alias, which mypy typing stubs define as:
+ # ValueType = NewType('ValueType', builtins.int)
+ ValueType = int
+
+ def __init__(self, enum_type):
+ """Inits EnumTypeWrapper with an EnumDescriptor."""
+ self._enum_type = enum_type
+ self.DESCRIPTOR = enum_type # pylint: disable=invalid-name
+
+ def Name(self, number): # pylint: disable=invalid-name
+ """Returns a string containing the name of an enum value."""
+ try:
+ return self._enum_type.values_by_number[number].name
+ except KeyError:
+ pass # fall out to break exception chaining
+
+ if not isinstance(number, int):
+ raise TypeError(
+ 'Enum value for {} must be an int, but got {} {!r}.'.format(
+ self._enum_type.name, type(number), number))
+ else:
+ # repr here to handle the odd case when you pass in a boolean.
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. + """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000..b346cf283e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py @@ -0,0 +1,213 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO(robinson): Unify error handling of "unknown extension" crap. +# TODO(robinson): Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + self._extended_message._FACTORY.GetPrototype(message_type) + assert getattr(extension_handle.message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (extension_handle.full_name, + extension_handle.message_type.full_name)) + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. 
This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. + """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + return self._extended_message._extensions_by_name.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
+ """ + return self._extended_message._extensions_by_number.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000..0fc255a774 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. 
This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. + """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 0000000000..63651a3f19 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageSet.RegisterExtension(message_set_extension3) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + + DESCRIPTOR._options = None + _TESTMESSAGESET._options = None + _TESTMESSAGESET._serialized_options = b'\010\001' + _TESTMESSAGESET._serialized_start=83 + _TESTMESSAGESET._serialized_end=113 + _TESTMESSAGESETEXTENSION1._serialized_start=116 + _TESTMESSAGESETEXTENSION1._serialized_end=281 + _TESTMESSAGESETEXTENSION2._serialized_start=284 + _TESTMESSAGESETEXTENSION2._serialized_end=451 + _TESTMESSAGESETEXTENSION3._serialized_start=453 + _TESTMESSAGESETEXTENSION3._serialized_end=493 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 0000000000..5497083197 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + 
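The `_serialized_options` blobs in these generated modules are simply the relevant options message in wire format: `b'\010\001'` is bytes 0x08 0x01, field 1 as a varint with value 1 (`MessageOptions.message_set_wire_format = true`), and `b'\020\001'` is 0x10 0x01, field 2 with value 1 (`FieldOptions.packed = true`). A quick way to confirm this, as an editor's sketch assuming the protobuf runtime is importable:

from google.protobuf.descriptor_pb2 import FieldOptions

opts = FieldOptions()
opts.MergeFromString(b'\020\001')  # octal escapes for bytes 0x10 0x01
assert opts.packed  # field number 2, varint wire type, value 1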
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + _TESTENUMVALUES._serialized_start=88 + _TESTENUMVALUES._serialized_end=409 + _TESTENUMVALUES_NESTEDENUM._serialized_start=378 + _TESTENUMVALUES_NESTEDENUM._serialized_end=409 + _TESTMISSINGENUMVALUES._serialized_start=412 + _TESTMISSINGENUMVALUES._serialized_end=751 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 + _JUSTSTRING._serialized_start=753 + _JUSTSTRING._serialized_end=780 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 0000000000..0953706bac --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) + + DESCRIPTOR._options = None + _DYNAMICMESSAGETYPE._serialized_start=132 + _DYNAMICMESSAGETYPE._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 0000000000..1cfa1b7c8b --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + ExtendedMessage.RegisterExtension(optional_int_extension) + ExtendedMessage.RegisterExtension(optional_message_extension) + ExtendedMessage.RegisterExtension(repeated_int_extension) + ExtendedMessage.RegisterExtension(repeated_message_extension) + + DESCRIPTOR._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None + _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' + _NESTEDMESSAGE.fields_by_name['submessage']._options = None + _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE._serialized_start=77 + _TOPLEVELMESSAGE._serialized_end=230 + _NESTEDMESSAGE._serialized_start=232 + _NESTEDMESSAGE._serialized_end=314 + _EXTENDEDMESSAGE._serialized_start=316 + _EXTENDEDMESSAGE._serialized_end=391 + _FOREIGNMESSAGE._serialized_start=393 + _FOREIGNMESSAGE._serialized_end=438 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 0000000000..d7f7115609 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + OutOfOrderFields.RegisterExtension(optional_uint64) + OutOfOrderFields.RegisterExtension(optional_int64) + globals()['class'].RegisterExtension(globals()['continue']) + getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) + globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) + + DESCRIPTOR._options = None + _IS._serialized_start=2669 + _IS._serialized_end=2696 + _OUTOFORDERFIELDS._serialized_start=74 + _OUTOFORDERFIELDS._serialized_end=178 + _CLASS._serialized_start=181 + _CLASS._serialized_end=514 + _CLASS_TRY._serialized_start=448 + _CLASS_TRY._serialized_end=476 + _CLASS_FOR._serialized_start=478 + _CLASS_FOR._serialized_end=506 + _EXTENDCLASS._serialized_start=516 + _EXTENDCLASS._serialized_end=579 + _TESTFULLKEYWORD._serialized_start=581 + _TESTFULLKEYWORD._serialized_end=707 + _LOTSNESTEDMESSAGE._serialized_start=710 + _LOTSNESTEDMESSAGE._serialized_end=2667 + _LOTSNESTEDMESSAGE_B0._serialized_start=731 + _LOTSNESTEDMESSAGE_B0._serialized_end=735 + _LOTSNESTEDMESSAGE_B1._serialized_start=737 + 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 + _LOTSNESTEDMESSAGE_B2._serialized_start=743 + _LOTSNESTEDMESSAGE_B2._serialized_end=747 + _LOTSNESTEDMESSAGE_B3._serialized_start=749 + _LOTSNESTEDMESSAGE_B3._serialized_end=753 + _LOTSNESTEDMESSAGE_B4._serialized_start=755 + _LOTSNESTEDMESSAGE_B4._serialized_end=759 + _LOTSNESTEDMESSAGE_B5._serialized_start=761 + _LOTSNESTEDMESSAGE_B5._serialized_end=765 + _LOTSNESTEDMESSAGE_B6._serialized_start=767 + _LOTSNESTEDMESSAGE_B6._serialized_end=771 + _LOTSNESTEDMESSAGE_B7._serialized_start=773 + _LOTSNESTEDMESSAGE_B7._serialized_end=777 + _LOTSNESTEDMESSAGE_B8._serialized_start=779 + _LOTSNESTEDMESSAGE_B8._serialized_end=783 + _LOTSNESTEDMESSAGE_B9._serialized_start=785 + _LOTSNESTEDMESSAGE_B9._serialized_end=789 + _LOTSNESTEDMESSAGE_B10._serialized_start=791 + _LOTSNESTEDMESSAGE_B10._serialized_end=796 + _LOTSNESTEDMESSAGE_B11._serialized_start=798 + _LOTSNESTEDMESSAGE_B11._serialized_end=803 + _LOTSNESTEDMESSAGE_B12._serialized_start=805 + _LOTSNESTEDMESSAGE_B12._serialized_end=810 + _LOTSNESTEDMESSAGE_B13._serialized_start=812 + _LOTSNESTEDMESSAGE_B13._serialized_end=817 + _LOTSNESTEDMESSAGE_B14._serialized_start=819 + _LOTSNESTEDMESSAGE_B14._serialized_end=824 + _LOTSNESTEDMESSAGE_B15._serialized_start=826 + _LOTSNESTEDMESSAGE_B15._serialized_end=831 + _LOTSNESTEDMESSAGE_B16._serialized_start=833 + _LOTSNESTEDMESSAGE_B16._serialized_end=838 + _LOTSNESTEDMESSAGE_B17._serialized_start=840 + _LOTSNESTEDMESSAGE_B17._serialized_end=845 + _LOTSNESTEDMESSAGE_B18._serialized_start=847 + _LOTSNESTEDMESSAGE_B18._serialized_end=852 + _LOTSNESTEDMESSAGE_B19._serialized_start=854 + _LOTSNESTEDMESSAGE_B19._serialized_end=859 + _LOTSNESTEDMESSAGE_B20._serialized_start=861 + _LOTSNESTEDMESSAGE_B20._serialized_end=866 + _LOTSNESTEDMESSAGE_B21._serialized_start=868 + _LOTSNESTEDMESSAGE_B21._serialized_end=873 + _LOTSNESTEDMESSAGE_B22._serialized_start=875 + _LOTSNESTEDMESSAGE_B22._serialized_end=880 + _LOTSNESTEDMESSAGE_B23._serialized_start=882 + _LOTSNESTEDMESSAGE_B23._serialized_end=887 + _LOTSNESTEDMESSAGE_B24._serialized_start=889 + _LOTSNESTEDMESSAGE_B24._serialized_end=894 + _LOTSNESTEDMESSAGE_B25._serialized_start=896 + _LOTSNESTEDMESSAGE_B25._serialized_end=901 + _LOTSNESTEDMESSAGE_B26._serialized_start=903 + _LOTSNESTEDMESSAGE_B26._serialized_end=908 + _LOTSNESTEDMESSAGE_B27._serialized_start=910 + _LOTSNESTEDMESSAGE_B27._serialized_end=915 + _LOTSNESTEDMESSAGE_B28._serialized_start=917 + _LOTSNESTEDMESSAGE_B28._serialized_end=922 + _LOTSNESTEDMESSAGE_B29._serialized_start=924 + _LOTSNESTEDMESSAGE_B29._serialized_end=929 + _LOTSNESTEDMESSAGE_B30._serialized_start=931 + _LOTSNESTEDMESSAGE_B30._serialized_end=936 + _LOTSNESTEDMESSAGE_B31._serialized_start=938 + _LOTSNESTEDMESSAGE_B31._serialized_end=943 + _LOTSNESTEDMESSAGE_B32._serialized_start=945 + _LOTSNESTEDMESSAGE_B32._serialized_end=950 + _LOTSNESTEDMESSAGE_B33._serialized_start=952 + _LOTSNESTEDMESSAGE_B33._serialized_end=957 + _LOTSNESTEDMESSAGE_B34._serialized_start=959 + _LOTSNESTEDMESSAGE_B34._serialized_end=964 + _LOTSNESTEDMESSAGE_B35._serialized_start=966 + _LOTSNESTEDMESSAGE_B35._serialized_end=971 + _LOTSNESTEDMESSAGE_B36._serialized_start=973 + _LOTSNESTEDMESSAGE_B36._serialized_end=978 + _LOTSNESTEDMESSAGE_B37._serialized_start=980 + _LOTSNESTEDMESSAGE_B37._serialized_end=985 + _LOTSNESTEDMESSAGE_B38._serialized_start=987 + _LOTSNESTEDMESSAGE_B38._serialized_end=992 + _LOTSNESTEDMESSAGE_B39._serialized_start=994 + _LOTSNESTEDMESSAGE_B39._serialized_end=999 + 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 + _LOTSNESTEDMESSAGE_B40._serialized_end=1006 + _LOTSNESTEDMESSAGE_B41._serialized_start=1008 + _LOTSNESTEDMESSAGE_B41._serialized_end=1013 + _LOTSNESTEDMESSAGE_B42._serialized_start=1015 + _LOTSNESTEDMESSAGE_B42._serialized_end=1020 + _LOTSNESTEDMESSAGE_B43._serialized_start=1022 + _LOTSNESTEDMESSAGE_B43._serialized_end=1027 + _LOTSNESTEDMESSAGE_B44._serialized_start=1029 + _LOTSNESTEDMESSAGE_B44._serialized_end=1034 + _LOTSNESTEDMESSAGE_B45._serialized_start=1036 + _LOTSNESTEDMESSAGE_B45._serialized_end=1041 + _LOTSNESTEDMESSAGE_B46._serialized_start=1043 + _LOTSNESTEDMESSAGE_B46._serialized_end=1048 + _LOTSNESTEDMESSAGE_B47._serialized_start=1050 + _LOTSNESTEDMESSAGE_B47._serialized_end=1055 + _LOTSNESTEDMESSAGE_B48._serialized_start=1057 + _LOTSNESTEDMESSAGE_B48._serialized_end=1062 + _LOTSNESTEDMESSAGE_B49._serialized_start=1064 + _LOTSNESTEDMESSAGE_B49._serialized_end=1069 + _LOTSNESTEDMESSAGE_B50._serialized_start=1071 + _LOTSNESTEDMESSAGE_B50._serialized_end=1076 + _LOTSNESTEDMESSAGE_B51._serialized_start=1078 + _LOTSNESTEDMESSAGE_B51._serialized_end=1083 + _LOTSNESTEDMESSAGE_B52._serialized_start=1085 + _LOTSNESTEDMESSAGE_B52._serialized_end=1090 + _LOTSNESTEDMESSAGE_B53._serialized_start=1092 + _LOTSNESTEDMESSAGE_B53._serialized_end=1097 + _LOTSNESTEDMESSAGE_B54._serialized_start=1099 + _LOTSNESTEDMESSAGE_B54._serialized_end=1104 + _LOTSNESTEDMESSAGE_B55._serialized_start=1106 + _LOTSNESTEDMESSAGE_B55._serialized_end=1111 + _LOTSNESTEDMESSAGE_B56._serialized_start=1113 + _LOTSNESTEDMESSAGE_B56._serialized_end=1118 + _LOTSNESTEDMESSAGE_B57._serialized_start=1120 + _LOTSNESTEDMESSAGE_B57._serialized_end=1125 + _LOTSNESTEDMESSAGE_B58._serialized_start=1127 + _LOTSNESTEDMESSAGE_B58._serialized_end=1132 + _LOTSNESTEDMESSAGE_B59._serialized_start=1134 + _LOTSNESTEDMESSAGE_B59._serialized_end=1139 + _LOTSNESTEDMESSAGE_B60._serialized_start=1141 + _LOTSNESTEDMESSAGE_B60._serialized_end=1146 + _LOTSNESTEDMESSAGE_B61._serialized_start=1148 + _LOTSNESTEDMESSAGE_B61._serialized_end=1153 + _LOTSNESTEDMESSAGE_B62._serialized_start=1155 + _LOTSNESTEDMESSAGE_B62._serialized_end=1160 + _LOTSNESTEDMESSAGE_B63._serialized_start=1162 + _LOTSNESTEDMESSAGE_B63._serialized_end=1167 + _LOTSNESTEDMESSAGE_B64._serialized_start=1169 + _LOTSNESTEDMESSAGE_B64._serialized_end=1174 + _LOTSNESTEDMESSAGE_B65._serialized_start=1176 + _LOTSNESTEDMESSAGE_B65._serialized_end=1181 + _LOTSNESTEDMESSAGE_B66._serialized_start=1183 + _LOTSNESTEDMESSAGE_B66._serialized_end=1188 + _LOTSNESTEDMESSAGE_B67._serialized_start=1190 + _LOTSNESTEDMESSAGE_B67._serialized_end=1195 + _LOTSNESTEDMESSAGE_B68._serialized_start=1197 + _LOTSNESTEDMESSAGE_B68._serialized_end=1202 + _LOTSNESTEDMESSAGE_B69._serialized_start=1204 + _LOTSNESTEDMESSAGE_B69._serialized_end=1209 + _LOTSNESTEDMESSAGE_B70._serialized_start=1211 + _LOTSNESTEDMESSAGE_B70._serialized_end=1216 + _LOTSNESTEDMESSAGE_B71._serialized_start=1218 + _LOTSNESTEDMESSAGE_B71._serialized_end=1223 + _LOTSNESTEDMESSAGE_B72._serialized_start=1225 + _LOTSNESTEDMESSAGE_B72._serialized_end=1230 + _LOTSNESTEDMESSAGE_B73._serialized_start=1232 + _LOTSNESTEDMESSAGE_B73._serialized_end=1237 + _LOTSNESTEDMESSAGE_B74._serialized_start=1239 + _LOTSNESTEDMESSAGE_B74._serialized_end=1244 + _LOTSNESTEDMESSAGE_B75._serialized_start=1246 + _LOTSNESTEDMESSAGE_B75._serialized_end=1251 + _LOTSNESTEDMESSAGE_B76._serialized_start=1253 + _LOTSNESTEDMESSAGE_B76._serialized_end=1258 + _LOTSNESTEDMESSAGE_B77._serialized_start=1260 + 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 + _LOTSNESTEDMESSAGE_B78._serialized_start=1267 + _LOTSNESTEDMESSAGE_B78._serialized_end=1272 + _LOTSNESTEDMESSAGE_B79._serialized_start=1274 + _LOTSNESTEDMESSAGE_B79._serialized_end=1279 + _LOTSNESTEDMESSAGE_B80._serialized_start=1281 + _LOTSNESTEDMESSAGE_B80._serialized_end=1286 + _LOTSNESTEDMESSAGE_B81._serialized_start=1288 + _LOTSNESTEDMESSAGE_B81._serialized_end=1293 + _LOTSNESTEDMESSAGE_B82._serialized_start=1295 + _LOTSNESTEDMESSAGE_B82._serialized_end=1300 + _LOTSNESTEDMESSAGE_B83._serialized_start=1302 + _LOTSNESTEDMESSAGE_B83._serialized_end=1307 + _LOTSNESTEDMESSAGE_B84._serialized_start=1309 + _LOTSNESTEDMESSAGE_B84._serialized_end=1314 + _LOTSNESTEDMESSAGE_B85._serialized_start=1316 + _LOTSNESTEDMESSAGE_B85._serialized_end=1321 + _LOTSNESTEDMESSAGE_B86._serialized_start=1323 + _LOTSNESTEDMESSAGE_B86._serialized_end=1328 + _LOTSNESTEDMESSAGE_B87._serialized_start=1330 + _LOTSNESTEDMESSAGE_B87._serialized_end=1335 + _LOTSNESTEDMESSAGE_B88._serialized_start=1337 + _LOTSNESTEDMESSAGE_B88._serialized_end=1342 + _LOTSNESTEDMESSAGE_B89._serialized_start=1344 + _LOTSNESTEDMESSAGE_B89._serialized_end=1349 + _LOTSNESTEDMESSAGE_B90._serialized_start=1351 + _LOTSNESTEDMESSAGE_B90._serialized_end=1356 + _LOTSNESTEDMESSAGE_B91._serialized_start=1358 + _LOTSNESTEDMESSAGE_B91._serialized_end=1363 + _LOTSNESTEDMESSAGE_B92._serialized_start=1365 + _LOTSNESTEDMESSAGE_B92._serialized_end=1370 + _LOTSNESTEDMESSAGE_B93._serialized_start=1372 + _LOTSNESTEDMESSAGE_B93._serialized_end=1377 + _LOTSNESTEDMESSAGE_B94._serialized_start=1379 + _LOTSNESTEDMESSAGE_B94._serialized_end=1384 + _LOTSNESTEDMESSAGE_B95._serialized_start=1386 + _LOTSNESTEDMESSAGE_B95._serialized_end=1391 + _LOTSNESTEDMESSAGE_B96._serialized_start=1393 + _LOTSNESTEDMESSAGE_B96._serialized_end=1398 + _LOTSNESTEDMESSAGE_B97._serialized_start=1400 + _LOTSNESTEDMESSAGE_B97._serialized_end=1405 + _LOTSNESTEDMESSAGE_B98._serialized_start=1407 + _LOTSNESTEDMESSAGE_B98._serialized_end=1412 + _LOTSNESTEDMESSAGE_B99._serialized_start=1414 + _LOTSNESTEDMESSAGE_B99._serialized_end=1419 + _LOTSNESTEDMESSAGE_B100._serialized_start=1421 + _LOTSNESTEDMESSAGE_B100._serialized_end=1427 + _LOTSNESTEDMESSAGE_B101._serialized_start=1429 + _LOTSNESTEDMESSAGE_B101._serialized_end=1435 + _LOTSNESTEDMESSAGE_B102._serialized_start=1437 + _LOTSNESTEDMESSAGE_B102._serialized_end=1443 + _LOTSNESTEDMESSAGE_B103._serialized_start=1445 + _LOTSNESTEDMESSAGE_B103._serialized_end=1451 + _LOTSNESTEDMESSAGE_B104._serialized_start=1453 + _LOTSNESTEDMESSAGE_B104._serialized_end=1459 + _LOTSNESTEDMESSAGE_B105._serialized_start=1461 + _LOTSNESTEDMESSAGE_B105._serialized_end=1467 + _LOTSNESTEDMESSAGE_B106._serialized_start=1469 + _LOTSNESTEDMESSAGE_B106._serialized_end=1475 + _LOTSNESTEDMESSAGE_B107._serialized_start=1477 + _LOTSNESTEDMESSAGE_B107._serialized_end=1483 + _LOTSNESTEDMESSAGE_B108._serialized_start=1485 + _LOTSNESTEDMESSAGE_B108._serialized_end=1491 + _LOTSNESTEDMESSAGE_B109._serialized_start=1493 + _LOTSNESTEDMESSAGE_B109._serialized_end=1499 + _LOTSNESTEDMESSAGE_B110._serialized_start=1501 + _LOTSNESTEDMESSAGE_B110._serialized_end=1507 + _LOTSNESTEDMESSAGE_B111._serialized_start=1509 + _LOTSNESTEDMESSAGE_B111._serialized_end=1515 + _LOTSNESTEDMESSAGE_B112._serialized_start=1517 + _LOTSNESTEDMESSAGE_B112._serialized_end=1523 + _LOTSNESTEDMESSAGE_B113._serialized_start=1525 + _LOTSNESTEDMESSAGE_B113._serialized_end=1531 + _LOTSNESTEDMESSAGE_B114._serialized_start=1533 + _LOTSNESTEDMESSAGE_B114._serialized_end=1539 + 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 + _LOTSNESTEDMESSAGE_B115._serialized_end=1547 + _LOTSNESTEDMESSAGE_B116._serialized_start=1549 + _LOTSNESTEDMESSAGE_B116._serialized_end=1555 + _LOTSNESTEDMESSAGE_B117._serialized_start=1557 + _LOTSNESTEDMESSAGE_B117._serialized_end=1563 + _LOTSNESTEDMESSAGE_B118._serialized_start=1565 + _LOTSNESTEDMESSAGE_B118._serialized_end=1571 + _LOTSNESTEDMESSAGE_B119._serialized_start=1573 + _LOTSNESTEDMESSAGE_B119._serialized_end=1579 + _LOTSNESTEDMESSAGE_B120._serialized_start=1581 + _LOTSNESTEDMESSAGE_B120._serialized_end=1587 + _LOTSNESTEDMESSAGE_B121._serialized_start=1589 + _LOTSNESTEDMESSAGE_B121._serialized_end=1595 + _LOTSNESTEDMESSAGE_B122._serialized_start=1597 + _LOTSNESTEDMESSAGE_B122._serialized_end=1603 + _LOTSNESTEDMESSAGE_B123._serialized_start=1605 + _LOTSNESTEDMESSAGE_B123._serialized_end=1611 + _LOTSNESTEDMESSAGE_B124._serialized_start=1613 + _LOTSNESTEDMESSAGE_B124._serialized_end=1619 + _LOTSNESTEDMESSAGE_B125._serialized_start=1621 + _LOTSNESTEDMESSAGE_B125._serialized_end=1627 + _LOTSNESTEDMESSAGE_B126._serialized_start=1629 + _LOTSNESTEDMESSAGE_B126._serialized_end=1635 + _LOTSNESTEDMESSAGE_B127._serialized_start=1637 + _LOTSNESTEDMESSAGE_B127._serialized_end=1643 + _LOTSNESTEDMESSAGE_B128._serialized_start=1645 + _LOTSNESTEDMESSAGE_B128._serialized_end=1651 + _LOTSNESTEDMESSAGE_B129._serialized_start=1653 + _LOTSNESTEDMESSAGE_B129._serialized_end=1659 + _LOTSNESTEDMESSAGE_B130._serialized_start=1661 + _LOTSNESTEDMESSAGE_B130._serialized_end=1667 + _LOTSNESTEDMESSAGE_B131._serialized_start=1669 + _LOTSNESTEDMESSAGE_B131._serialized_end=1675 + _LOTSNESTEDMESSAGE_B132._serialized_start=1677 + _LOTSNESTEDMESSAGE_B132._serialized_end=1683 + _LOTSNESTEDMESSAGE_B133._serialized_start=1685 + _LOTSNESTEDMESSAGE_B133._serialized_end=1691 + _LOTSNESTEDMESSAGE_B134._serialized_start=1693 + _LOTSNESTEDMESSAGE_B134._serialized_end=1699 + _LOTSNESTEDMESSAGE_B135._serialized_start=1701 + _LOTSNESTEDMESSAGE_B135._serialized_end=1707 + _LOTSNESTEDMESSAGE_B136._serialized_start=1709 + _LOTSNESTEDMESSAGE_B136._serialized_end=1715 + _LOTSNESTEDMESSAGE_B137._serialized_start=1717 + _LOTSNESTEDMESSAGE_B137._serialized_end=1723 + _LOTSNESTEDMESSAGE_B138._serialized_start=1725 + _LOTSNESTEDMESSAGE_B138._serialized_end=1731 + _LOTSNESTEDMESSAGE_B139._serialized_start=1733 + _LOTSNESTEDMESSAGE_B139._serialized_end=1739 + _LOTSNESTEDMESSAGE_B140._serialized_start=1741 + _LOTSNESTEDMESSAGE_B140._serialized_end=1747 + _LOTSNESTEDMESSAGE_B141._serialized_start=1749 + _LOTSNESTEDMESSAGE_B141._serialized_end=1755 + _LOTSNESTEDMESSAGE_B142._serialized_start=1757 + _LOTSNESTEDMESSAGE_B142._serialized_end=1763 + _LOTSNESTEDMESSAGE_B143._serialized_start=1765 + _LOTSNESTEDMESSAGE_B143._serialized_end=1771 + _LOTSNESTEDMESSAGE_B144._serialized_start=1773 + _LOTSNESTEDMESSAGE_B144._serialized_end=1779 + _LOTSNESTEDMESSAGE_B145._serialized_start=1781 + _LOTSNESTEDMESSAGE_B145._serialized_end=1787 + _LOTSNESTEDMESSAGE_B146._serialized_start=1789 + _LOTSNESTEDMESSAGE_B146._serialized_end=1795 + _LOTSNESTEDMESSAGE_B147._serialized_start=1797 + _LOTSNESTEDMESSAGE_B147._serialized_end=1803 + _LOTSNESTEDMESSAGE_B148._serialized_start=1805 + _LOTSNESTEDMESSAGE_B148._serialized_end=1811 + _LOTSNESTEDMESSAGE_B149._serialized_start=1813 + _LOTSNESTEDMESSAGE_B149._serialized_end=1819 + _LOTSNESTEDMESSAGE_B150._serialized_start=1821 + _LOTSNESTEDMESSAGE_B150._serialized_end=1827 + _LOTSNESTEDMESSAGE_B151._serialized_start=1829 + _LOTSNESTEDMESSAGE_B151._serialized_end=1835 + 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 + _LOTSNESTEDMESSAGE_B152._serialized_end=1843 + _LOTSNESTEDMESSAGE_B153._serialized_start=1845 + _LOTSNESTEDMESSAGE_B153._serialized_end=1851 + _LOTSNESTEDMESSAGE_B154._serialized_start=1853 + _LOTSNESTEDMESSAGE_B154._serialized_end=1859 + _LOTSNESTEDMESSAGE_B155._serialized_start=1861 + _LOTSNESTEDMESSAGE_B155._serialized_end=1867 + _LOTSNESTEDMESSAGE_B156._serialized_start=1869 + _LOTSNESTEDMESSAGE_B156._serialized_end=1875 + _LOTSNESTEDMESSAGE_B157._serialized_start=1877 + _LOTSNESTEDMESSAGE_B157._serialized_end=1883 + _LOTSNESTEDMESSAGE_B158._serialized_start=1885 + _LOTSNESTEDMESSAGE_B158._serialized_end=1891 + _LOTSNESTEDMESSAGE_B159._serialized_start=1893 + _LOTSNESTEDMESSAGE_B159._serialized_end=1899 + _LOTSNESTEDMESSAGE_B160._serialized_start=1901 + _LOTSNESTEDMESSAGE_B160._serialized_end=1907 + _LOTSNESTEDMESSAGE_B161._serialized_start=1909 + _LOTSNESTEDMESSAGE_B161._serialized_end=1915 + _LOTSNESTEDMESSAGE_B162._serialized_start=1917 + _LOTSNESTEDMESSAGE_B162._serialized_end=1923 + _LOTSNESTEDMESSAGE_B163._serialized_start=1925 + _LOTSNESTEDMESSAGE_B163._serialized_end=1931 + _LOTSNESTEDMESSAGE_B164._serialized_start=1933 + _LOTSNESTEDMESSAGE_B164._serialized_end=1939 + _LOTSNESTEDMESSAGE_B165._serialized_start=1941 + _LOTSNESTEDMESSAGE_B165._serialized_end=1947 + _LOTSNESTEDMESSAGE_B166._serialized_start=1949 + _LOTSNESTEDMESSAGE_B166._serialized_end=1955 + _LOTSNESTEDMESSAGE_B167._serialized_start=1957 + _LOTSNESTEDMESSAGE_B167._serialized_end=1963 + _LOTSNESTEDMESSAGE_B168._serialized_start=1965 + _LOTSNESTEDMESSAGE_B168._serialized_end=1971 + _LOTSNESTEDMESSAGE_B169._serialized_start=1973 + _LOTSNESTEDMESSAGE_B169._serialized_end=1979 + _LOTSNESTEDMESSAGE_B170._serialized_start=1981 + _LOTSNESTEDMESSAGE_B170._serialized_end=1987 + _LOTSNESTEDMESSAGE_B171._serialized_start=1989 + _LOTSNESTEDMESSAGE_B171._serialized_end=1995 + _LOTSNESTEDMESSAGE_B172._serialized_start=1997 + _LOTSNESTEDMESSAGE_B172._serialized_end=2003 + _LOTSNESTEDMESSAGE_B173._serialized_start=2005 + _LOTSNESTEDMESSAGE_B173._serialized_end=2011 + _LOTSNESTEDMESSAGE_B174._serialized_start=2013 + _LOTSNESTEDMESSAGE_B174._serialized_end=2019 + _LOTSNESTEDMESSAGE_B175._serialized_start=2021 + _LOTSNESTEDMESSAGE_B175._serialized_end=2027 + _LOTSNESTEDMESSAGE_B176._serialized_start=2029 + _LOTSNESTEDMESSAGE_B176._serialized_end=2035 + _LOTSNESTEDMESSAGE_B177._serialized_start=2037 + _LOTSNESTEDMESSAGE_B177._serialized_end=2043 + _LOTSNESTEDMESSAGE_B178._serialized_start=2045 + _LOTSNESTEDMESSAGE_B178._serialized_end=2051 + _LOTSNESTEDMESSAGE_B179._serialized_start=2053 + _LOTSNESTEDMESSAGE_B179._serialized_end=2059 + _LOTSNESTEDMESSAGE_B180._serialized_start=2061 + _LOTSNESTEDMESSAGE_B180._serialized_end=2067 + _LOTSNESTEDMESSAGE_B181._serialized_start=2069 + _LOTSNESTEDMESSAGE_B181._serialized_end=2075 + _LOTSNESTEDMESSAGE_B182._serialized_start=2077 + _LOTSNESTEDMESSAGE_B182._serialized_end=2083 + _LOTSNESTEDMESSAGE_B183._serialized_start=2085 + _LOTSNESTEDMESSAGE_B183._serialized_end=2091 + _LOTSNESTEDMESSAGE_B184._serialized_start=2093 + _LOTSNESTEDMESSAGE_B184._serialized_end=2099 + _LOTSNESTEDMESSAGE_B185._serialized_start=2101 + _LOTSNESTEDMESSAGE_B185._serialized_end=2107 + _LOTSNESTEDMESSAGE_B186._serialized_start=2109 + _LOTSNESTEDMESSAGE_B186._serialized_end=2115 + _LOTSNESTEDMESSAGE_B187._serialized_start=2117 + _LOTSNESTEDMESSAGE_B187._serialized_end=2123 + _LOTSNESTEDMESSAGE_B188._serialized_start=2125 + _LOTSNESTEDMESSAGE_B188._serialized_end=2131 + 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 + _LOTSNESTEDMESSAGE_B189._serialized_end=2139 + _LOTSNESTEDMESSAGE_B190._serialized_start=2141 + _LOTSNESTEDMESSAGE_B190._serialized_end=2147 + _LOTSNESTEDMESSAGE_B191._serialized_start=2149 + _LOTSNESTEDMESSAGE_B191._serialized_end=2155 + _LOTSNESTEDMESSAGE_B192._serialized_start=2157 + _LOTSNESTEDMESSAGE_B192._serialized_end=2163 + _LOTSNESTEDMESSAGE_B193._serialized_start=2165 + _LOTSNESTEDMESSAGE_B193._serialized_end=2171 + _LOTSNESTEDMESSAGE_B194._serialized_start=2173 + _LOTSNESTEDMESSAGE_B194._serialized_end=2179 + _LOTSNESTEDMESSAGE_B195._serialized_start=2181 + _LOTSNESTEDMESSAGE_B195._serialized_end=2187 + _LOTSNESTEDMESSAGE_B196._serialized_start=2189 + _LOTSNESTEDMESSAGE_B196._serialized_end=2195 + _LOTSNESTEDMESSAGE_B197._serialized_start=2197 + _LOTSNESTEDMESSAGE_B197._serialized_end=2203 + _LOTSNESTEDMESSAGE_B198._serialized_start=2205 + _LOTSNESTEDMESSAGE_B198._serialized_end=2211 + _LOTSNESTEDMESSAGE_B199._serialized_start=2213 + _LOTSNESTEDMESSAGE_B199._serialized_end=2219 + _LOTSNESTEDMESSAGE_B200._serialized_start=2221 + _LOTSNESTEDMESSAGE_B200._serialized_end=2227 + _LOTSNESTEDMESSAGE_B201._serialized_start=2229 + _LOTSNESTEDMESSAGE_B201._serialized_end=2235 + _LOTSNESTEDMESSAGE_B202._serialized_start=2237 + _LOTSNESTEDMESSAGE_B202._serialized_end=2243 + _LOTSNESTEDMESSAGE_B203._serialized_start=2245 + _LOTSNESTEDMESSAGE_B203._serialized_end=2251 + _LOTSNESTEDMESSAGE_B204._serialized_start=2253 + _LOTSNESTEDMESSAGE_B204._serialized_end=2259 + _LOTSNESTEDMESSAGE_B205._serialized_start=2261 + _LOTSNESTEDMESSAGE_B205._serialized_end=2267 + _LOTSNESTEDMESSAGE_B206._serialized_start=2269 + _LOTSNESTEDMESSAGE_B206._serialized_end=2275 + _LOTSNESTEDMESSAGE_B207._serialized_start=2277 + _LOTSNESTEDMESSAGE_B207._serialized_end=2283 + _LOTSNESTEDMESSAGE_B208._serialized_start=2285 + _LOTSNESTEDMESSAGE_B208._serialized_end=2291 + _LOTSNESTEDMESSAGE_B209._serialized_start=2293 + _LOTSNESTEDMESSAGE_B209._serialized_end=2299 + _LOTSNESTEDMESSAGE_B210._serialized_start=2301 + _LOTSNESTEDMESSAGE_B210._serialized_end=2307 + _LOTSNESTEDMESSAGE_B211._serialized_start=2309 + _LOTSNESTEDMESSAGE_B211._serialized_end=2315 + _LOTSNESTEDMESSAGE_B212._serialized_start=2317 + _LOTSNESTEDMESSAGE_B212._serialized_end=2323 + _LOTSNESTEDMESSAGE_B213._serialized_start=2325 + _LOTSNESTEDMESSAGE_B213._serialized_end=2331 + _LOTSNESTEDMESSAGE_B214._serialized_start=2333 + _LOTSNESTEDMESSAGE_B214._serialized_end=2339 + _LOTSNESTEDMESSAGE_B215._serialized_start=2341 + _LOTSNESTEDMESSAGE_B215._serialized_end=2347 + _LOTSNESTEDMESSAGE_B216._serialized_start=2349 + _LOTSNESTEDMESSAGE_B216._serialized_end=2355 + _LOTSNESTEDMESSAGE_B217._serialized_start=2357 + _LOTSNESTEDMESSAGE_B217._serialized_end=2363 + _LOTSNESTEDMESSAGE_B218._serialized_start=2365 + _LOTSNESTEDMESSAGE_B218._serialized_end=2371 + _LOTSNESTEDMESSAGE_B219._serialized_start=2373 + _LOTSNESTEDMESSAGE_B219._serialized_end=2379 + _LOTSNESTEDMESSAGE_B220._serialized_start=2381 + _LOTSNESTEDMESSAGE_B220._serialized_end=2387 + _LOTSNESTEDMESSAGE_B221._serialized_start=2389 + _LOTSNESTEDMESSAGE_B221._serialized_end=2395 + _LOTSNESTEDMESSAGE_B222._serialized_start=2397 + _LOTSNESTEDMESSAGE_B222._serialized_end=2403 + _LOTSNESTEDMESSAGE_B223._serialized_start=2405 + _LOTSNESTEDMESSAGE_B223._serialized_end=2411 + _LOTSNESTEDMESSAGE_B224._serialized_start=2413 + _LOTSNESTEDMESSAGE_B224._serialized_end=2419 + _LOTSNESTEDMESSAGE_B225._serialized_start=2421 + _LOTSNESTEDMESSAGE_B225._serialized_end=2427 + 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 + _LOTSNESTEDMESSAGE_B226._serialized_end=2435 + _LOTSNESTEDMESSAGE_B227._serialized_start=2437 + _LOTSNESTEDMESSAGE_B227._serialized_end=2443 + _LOTSNESTEDMESSAGE_B228._serialized_start=2445 + _LOTSNESTEDMESSAGE_B228._serialized_end=2451 + _LOTSNESTEDMESSAGE_B229._serialized_start=2453 + _LOTSNESTEDMESSAGE_B229._serialized_end=2459 + _LOTSNESTEDMESSAGE_B230._serialized_start=2461 + _LOTSNESTEDMESSAGE_B230._serialized_end=2467 + _LOTSNESTEDMESSAGE_B231._serialized_start=2469 + _LOTSNESTEDMESSAGE_B231._serialized_end=2475 + _LOTSNESTEDMESSAGE_B232._serialized_start=2477 + _LOTSNESTEDMESSAGE_B232._serialized_end=2483 + _LOTSNESTEDMESSAGE_B233._serialized_start=2485 + _LOTSNESTEDMESSAGE_B233._serialized_end=2491 + _LOTSNESTEDMESSAGE_B234._serialized_start=2493 + _LOTSNESTEDMESSAGE_B234._serialized_end=2499 + _LOTSNESTEDMESSAGE_B235._serialized_start=2501 + _LOTSNESTEDMESSAGE_B235._serialized_end=2507 + _LOTSNESTEDMESSAGE_B236._serialized_start=2509 + _LOTSNESTEDMESSAGE_B236._serialized_end=2515 + _LOTSNESTEDMESSAGE_B237._serialized_start=2517 + _LOTSNESTEDMESSAGE_B237._serialized_end=2523 + _LOTSNESTEDMESSAGE_B238._serialized_start=2525 + _LOTSNESTEDMESSAGE_B238._serialized_end=2531 + _LOTSNESTEDMESSAGE_B239._serialized_start=2533 + _LOTSNESTEDMESSAGE_B239._serialized_end=2539 + _LOTSNESTEDMESSAGE_B240._serialized_start=2541 + _LOTSNESTEDMESSAGE_B240._serialized_end=2547 + _LOTSNESTEDMESSAGE_B241._serialized_start=2549 + _LOTSNESTEDMESSAGE_B241._serialized_end=2555 + _LOTSNESTEDMESSAGE_B242._serialized_start=2557 + _LOTSNESTEDMESSAGE_B242._serialized_end=2563 + _LOTSNESTEDMESSAGE_B243._serialized_start=2565 + _LOTSNESTEDMESSAGE_B243._serialized_end=2571 + _LOTSNESTEDMESSAGE_B244._serialized_start=2573 + _LOTSNESTEDMESSAGE_B244._serialized_end=2579 + _LOTSNESTEDMESSAGE_B245._serialized_start=2581 + _LOTSNESTEDMESSAGE_B245._serialized_end=2587 + _LOTSNESTEDMESSAGE_B246._serialized_start=2589 + _LOTSNESTEDMESSAGE_B246._serialized_end=2595 + _LOTSNESTEDMESSAGE_B247._serialized_start=2597 + _LOTSNESTEDMESSAGE_B247._serialized_end=2603 + _LOTSNESTEDMESSAGE_B248._serialized_start=2605 + _LOTSNESTEDMESSAGE_B248._serialized_end=2611 + _LOTSNESTEDMESSAGE_B249._serialized_start=2613 + _LOTSNESTEDMESSAGE_B249._serialized_end=2619 + _LOTSNESTEDMESSAGE_B250._serialized_start=2621 + _LOTSNESTEDMESSAGE_B250._serialized_end=2627 + _LOTSNESTEDMESSAGE_B251._serialized_start=2629 + _LOTSNESTEDMESSAGE_B251._serialized_end=2635 + _LOTSNESTEDMESSAGE_B252._serialized_start=2637 + _LOTSNESTEDMESSAGE_B252._serialized_end=2643 + _LOTSNESTEDMESSAGE_B253._serialized_start=2645 + _LOTSNESTEDMESSAGE_B253._serialized_end=2651 + _LOTSNESTEDMESSAGE_B254._serialized_start=2653 + _LOTSNESTEDMESSAGE_B254._serialized_end=2659 + _LOTSNESTEDMESSAGE_B255._serialized_start=2661 + _LOTSNESTEDMESSAGE_B255._serialized_end=2667 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py new file mode 100644 index 0000000000..d46dee080a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/no_package.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _NOPACKAGEENUM._serialized_start=106 + _NOPACKAGEENUM._serialized_end=169 + _NOPACKAGEMESSAGE._serialized_start=45 + _NOPACKAGEMESSAGE._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py new file mode 100644 index 0000000000..2921d5cb6e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py @@ -0,0 +1,1539 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. +# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
+ +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +from io import BytesIO +import struct +import sys +import weakref + +# We use "as" to avoid name collisions with variables. +from google.protobuf.internal import api_implementation +from google.protobuf.internal import containers +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import extension_dict +from google.protobuf.internal import message_listener as message_listener_mod +from google.protobuf.internal import type_checkers +from google.protobuf.internal import well_known_types +from google.protobuf.internal import wire_format +from google.protobuf import descriptor as descriptor_mod +from google.protobuf import message as message_mod +from google.protobuf import text_format + +_FieldDescriptor = descriptor_mod.FieldDescriptor +_AnyFullTypeName = 'google.protobuf.Any' +_ExtensionDict = extension_dict._ExtensionDict + +class GeneratedProtocolMessageType(type): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + We add implementations for all methods described in the Message class. We + also create properties to allow getting/setting all fields in the protocol + message. Finally, we create slots to prevent users from accidentally + "setting" nonexistent fields in the protocol message, which then wouldn't get + serialized / deserialized properly. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto_instance.foo_field = 23 + ... + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __new__(cls, name, bases, dictionary): + """Custom allocation for runtime-generated class types. + + We override __new__ because this is apparently the only place + where we can meaningfully set __slots__ on the class we're creating(?). + (The interplay between metaclasses and slots is not very well-documented). + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol. + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + + Returns: + Newly-allocated class. + + Raises: + RuntimeError: Generated code only works with the python cpp extension. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + if isinstance(descriptor, str): + raise RuntimeError('The generated code only works with the python cpp ' + 'extension, but it is using the pure python runtime.') + + # If a concrete class already exists for this descriptor, don't try to + # create another.
Doing so will break any messages that already exist with + # the existing class. + # + # The C++ implementation appears to have its own internal `PyMessageFactory` + # to achieve similar results. + # + # This most commonly happens in `text_format.py` when using descriptors from + # a custom pool; it calls symbol_database.Global().getPrototype() on a + # descriptor which already has an existing concrete class. + new_class = getattr(descriptor, '_concrete_class', None) + if new_class: + return new_class + + if descriptor.full_name in well_known_types.WKTBASES: + bases += (well_known_types.WKTBASES[descriptor.full_name],) + _AddClassAttributesForNestedExtensions(descriptor, dictionary) + _AddSlots(descriptor, dictionary) + + superclass = super(GeneratedProtocolMessageType, cls) + new_class = superclass.__new__(cls, name, bases, dictionary) + return new_class + + def __init__(cls, name, bases, dictionary): + """Here we perform the majority of our work on the class. + We add enum getters, an __init__ method, implementations + of all Message methods, and properties for all fields + in the protocol type. + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + # If this is an _existing_ class looked up via `_concrete_class` in the + # __new__ method above, then we don't need to re-initialize anything. + existing_class = getattr(descriptor, '_concrete_class', None) + if existing_class: + assert existing_class is cls, ( + 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' + % (descriptor.full_name)) + return + + cls._decoders_by_tag = {} + if (descriptor.has_options and + descriptor.GetOptions().message_set_wire_format): + cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( + decoder.MessageSetItemDecoder(descriptor), None) + + # Attach stuff to each FieldDescriptor for quick lookup later on. + for field in descriptor.fields: + _AttachFieldHelpers(cls, field) + + descriptor._concrete_class = cls # pylint: disable=protected-access + _AddEnumValues(descriptor, cls) + _AddInitMethod(descriptor, cls) + _AddPropertiesForFields(descriptor, cls) + _AddPropertiesForExtensions(descriptor, cls) + _AddStaticMethods(cls) + _AddMessageMethods(descriptor, cls) + _AddPrivateHelperMethods(descriptor, cls) + + superclass = super(GeneratedProtocolMessageType, cls) + superclass.__init__(name, bases, dictionary) + + +# Stateless helpers for GeneratedProtocolMessageType below. +# Outside clients should not access these directly. +# +# I opted not to make any of these methods on the metaclass, to make it more +# clear that I'm not really using any state there and to keep clients from +# thinking that they have direct access to these construction helpers. + + +def _PropertyName(proto_field_name): + """Returns the name of the public property attribute which + clients can use to get and (in some cases) set the value + of a protocol message field. + + Args: + proto_field_name: The protocol message field name, exactly + as it appears (or would appear) in a .proto file. + """ + # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. 
+ # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. + # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. + """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_unknown_field_set', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name['value'] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + is_proto3 = field_descriptor.containing_type.syntax == 'proto3' + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == 'proto2': + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField('packed') and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor, + _IsMessageMapField(field_descriptor)) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = 
field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + clear_if_default = False + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + elif (is_proto3 and not is_repeated and + field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): + clear_if_default = True + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + elif decode_type == _FieldDescriptor.TYPE_STRING: + field_decoder = decoder.StringDecoder( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + # pylint: disable=protected-access + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. + cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker, + field.message_type) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker, + field.message_type) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. 
+ + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). + message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + assert getattr(message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (field.full_name, message_type.full_name)) + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + raise exc.with_traceback(sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. 
+ self._unknown_fields = () + # _unknown_field_set is None when empty for efficiency, and will be + # turned into UnknownFieldSet struct if fields are added. + self._unknown_field_set = None # pylint: disable=protected-access + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. + continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
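
As an aside, the keyword-argument handling in the generated __init__ above is what lets constructors of generated classes accept plain Python values. A minimal usage sketch, not part of the diff; the class Address and all of its fields are hypothetical:

    addr = Address(
        street='221B Baker St',           # scalar: type-checked, then setattr()
        tags=['home', 'primary'],         # repeated scalar: copied in via extend()
        geo={'lat': 51.5, 'lng': -0.16},  # dict promoted to a submessage, then MergeFrom()
        kind='HOME',                      # enum label resolved to its integer value
    )
    # An unknown keyword raises TypeError, and passing field=None is
    # treated the same as omitting the field entirely.
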
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + is_proto3 = field.containing_type.syntax == 'proto3' + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. + if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO(amauryfa): Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
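
For orientation, a short behavioral sketch of the properties built above, assuming a hypothetical proto3 message instance m with a scalar field count and a submessage field child (none of these names are in the vendored file):

    m.count = 5        # type-checked, stored in m._fields, _Modified() is called
    m.count = 0        # proto3: setting the default value pops the field from _fields
    m.child = Child()  # AttributeError: composite fields reject direct assignment;
                       # mutate m.child in place or use m.child.CopyFrom(...) instead
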
+ pool = descriptor.file.pool + cls._extensions_by_number = pool._extensions_by_number[descriptor] + cls._extensions_by_name = pool._extensions_by_name[descriptor] + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. + # pylint: disable=protected-access + cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) + _AttachFieldHelpers(cls, extension_handle) + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_PROTO3_ERROR_TEMPLATE = \ + ('Protocol message %s has no non-repeated submessage field "%s" ' + 'nor marked as optional') +_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % (message_descriptor.full_name, field_name)) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. 
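
The HasField logic above gives proto3 messages presence only for submessages and oneof members; a sketch of the resulting semantics (same hypothetical proto3 message as before):

    m = MyMessage()
    m.HasField('child')   # False until the submessage is actually modified
    m.child.value = 1
    m.HasField('child')   # True: the child reported the mutation to its parent
    m.HasField('count')   # ValueError: plain proto3 scalars are not hassable
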
+ if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst that can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. + self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + + # Similar to ClearField(), above. + if extension_handle in self._fields: + del self._fields[extension_handle] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % extension_handle.full_name) + + if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(extension_handle) + return value is not None and value._is_present_in_parent + else: + return extension_handle in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is different from the public Any Unpack method, which + takes the target message as an argument. _InternalUnpackAny does not have a + target message type and needs to find the message type in the descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + # TODO(amauryfa): Don't use the factory of generated messages. + # To make Any work with custom factories, use the message factory of the + # parent message. + # pylint: disable=g-import-not-at-top + from google.protobuf import symbol_database + factory = symbol_database.Default() + + type_url = msg.type_url + + if not type_url: + return None + + # TODO(haberman): For now we just strip the hostname. Better logic will be + # required. + type_name = type_url.split('/')[-1] + descriptor = factory.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + message_class = factory.GetPrototype(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return False + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, + # then use it for the comparison.
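
The Any special case in __eq__ above means two Any messages compare by their unpacked payloads rather than by raw serialized bytes. A sketch, where payload stands for any initialized instance of a generated message class registered in the default pool:

    from google.protobuf import any_pb2

    a, b = any_pb2.Any(), any_pb2.Any()
    a.Pack(payload)
    b.Pack(payload)
    assert a == b   # routed through _InternalUnpackAny above
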
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + size = descriptor.fields_by_name['key']._sizer(self.key) + size += descriptor.fields_by_name['value']._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
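
A behavior sketch for the initialization check that SerializeToString performs above, assuming a hypothetical proto2 message Person with a required field id:

    p = Person()
    p.SerializeToString()          # raises EncodeError listing the missing fields
    p.SerializePartialToString()   # succeeds: the partial variant skips the check
    p.id = 42
    data = p.SerializeToString()   # now fine
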
+ if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString(**kwargs) + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self, **kwargs): + out = BytesIO() + self._InternalSerialize(out.write, **kwargs) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes, deterministic=None): + if deterministic is None: + deterministic = ( + api_implementation.IsPythonDefaultSerializationDeterministic()) + else: + deterministic = bool(deterministic) + + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + descriptor.fields_by_name['key']._encoder( + write_bytes, self.key, deterministic) + descriptor.fields_by_name['value']._encoder( + write_bytes, self.value, deterministic) + else: + for field_descriptor, field_value in self.ListFields(): + field_descriptor._encoder(write_bytes, field_value, deterministic) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + serialized = memoryview(serialized) + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + decoders_by_tag = cls._decoders_by_tag + + def InternalParse(self, buffer, pos, end): + """Create a message from serialized bytes. + + Args: + self: Message, instance of the proto message object. + buffer: memoryview of the serialized data. + pos: int, position to start in the serialized data. + end: int, end position of the serialized data. + + Returns: + int, the new position in the serialized data after parsing. + """ + # Guard against internal misuse, since this function is called internally + # quite extensively, and it's easy to accidentally pass bytes.
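
A round-trip sketch for the parsing entry point defined above, where MyMessage stands for any generated class and name for one of its hypothetical string fields:

    original = MyMessage()
    original.name = 'example'
    data = original.SerializeToString()

    copy = MyMessage()
    length = copy.MergeFromString(data)   # returns len(data) "for legacy reasons"
    assert length == len(data)
    assert copy == original
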
+ assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + if unknown_field_set is None: + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO(jieluo): remove old_pos. + old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + # TODO(jieluo): remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". 
+ """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + # pylint: disable=protected-access + if self._unknown_field_set is None: + self._unknown_field_set = containers.UnknownFieldSet() + self._unknown_field_set._extend(msg._unknown_field_set) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. 
+ self._fields = {} + self._unknown_fields = () + # pylint: disable=protected-access + if self._unknown_field_set is not None: + self._unknown_field_set._clear() + self._unknown_field_set = None + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + if self._unknown_field_set is None: # pylint: disable=protected-access + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + return self._unknown_field_set # pylint: disable=protected-access + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + self._unknown_field_set = None # pylint: disable=protected-access + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.UnknownFields = _UnknownFields + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.qux = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. + pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000..a53e71fe8e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py @@ -0,0 +1,435 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides type checking routines.
+
+This module defines type checking utilities in the forms of dictionaries:
+
+VALUE_CHECKERS: A dictionary of field types and a value validation object.
+TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
+  function.
+TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
+  function.
+FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
+  corresponding wire types.
+TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
+  function.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import ctypes
+import numbers
+
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor
+
+_FieldDescriptor = descriptor.FieldDescriptor
+
+
+def TruncateToFourByteFloat(original):
+  return ctypes.c_float(original).value
+
+
+def ToShortestFloat(original):
+  """Returns the shortest float that has the same value on the wire."""
+  # All 4-byte floats have between 6 and 9 significant digits, so we start
+  # with 6 as the lower bound. The search has to be iterative because using
+  # '.9g' directly does not remove the formatting noise for most values:
+  # for example, a float field set to 0.9 would print as 0.899999976.
+  precision = 6
+  rounded = float('{0:.{1}g}'.format(original, precision))
+  while TruncateToFourByteFloat(rounded) != original:
+    precision += 1
+    rounded = float('{0:.{1}g}'.format(original, precision))
+  return rounded
+
+
+def SupportsOpenEnums(field_descriptor):
+  return field_descriptor.containing_type.syntax == 'proto3'
+
+
+def GetTypeChecker(field):
+  """Returns a type checker for a message field of the specified types.
+
+  Args:
+    field: FieldDescriptor object for this field.
+
+  Returns:
+    An instance of TypeChecker which can be used to verify the types
+    of values assigned to a field of the specified type.
+  """
+  if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
+      field.type == _FieldDescriptor.TYPE_STRING):
+    return UnicodeValueChecker()
+  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+    if SupportsOpenEnums(field):
+      # When open enums are supported, any int32 can be assigned.
+      return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
+    else:
+      return EnumValueChecker(field.enum_type)
+  return _VALUE_CHECKERS[field.cpp_type]
+
+
+# None of the typecheckers below make any attempt to guard against people
+# subclassing builtin types and doing weird things.
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +class TypeCheckerWithDefault(TypeChecker): + + def __init__(self, default_value, *acceptable_types): + TypeChecker.__init__(self, *acceptable_types) + self._default_value = default_value + + def DefaultValue(self): + return self._default_value + + +class BoolValueChecker(object): + """Type checker used for bool fields.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bool, int))) + raise TypeError(message) + return bool(proposed_value) + + def DefaultValue(self): + return False + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + + if not self._MIN <= int(proposed_value) <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force all values to int to make alternate implementations where the + # distinction is more significant (e.g. the C++ implementation) simpler. + proposed_value = int(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, numbers.Integral): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + if int(proposed_value) not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, str)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, str))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
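+    # (str input is checked the other way around below: it must be encodable
+    # as UTF-8, since e.g. lone surrogates would fail at serialization time.)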
+ if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + else: + try: + proposed_value.encode('utf8') + except UnicodeEncodeError: + raise ValueError('%.1024r isn\'t a valid unicode string and ' + 'can\'t be encoded in UTF-8.'% + (proposed_value)) + + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + + +# The max 4 bytes float is about 3.4028234663852886e+38 +_FLOAT_MAX = float.fromhex('0x1.fffffep+127') +_FLOAT_MIN = -_FLOAT_MAX +_INF = float('inf') +_NEG_INF = float('-inf') + + +class DoubleValueChecker(object): + """Checker used for double fields. + + Performs type-check and range check. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + if (not hasattr(proposed_value, '__float__') and + not hasattr(proposed_value, '__index__')) or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: int, float' % + (proposed_value, type(proposed_value))) + raise TypeError(message) + return float(proposed_value) + + def DefaultValue(self): + return 0.0 + + +class FloatValueChecker(DoubleValueChecker): + """Checker used for float fields. + + Performs type-check and range check. + + Values exceeding a 32-bit float will be converted to inf/-inf. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + converted_value = super().CheckValue(proposed_value) + # This inf rounding matches the C++ proto SafeDoubleToFloat logic. + if converted_value > _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
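+#
+# Illustrative example (not part of the upstream module): for field number 1
+# and the value 150,
+#   TYPE_TO_BYTE_SIZE_FN[_FieldDescriptor.TYPE_INT64](1, 150) == 3
+# since the varint-encoded tag takes 1 byte and the varint 150 takes 2.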
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. +TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. +TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
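+# Each entry is a decoder *constructor* rather than a ready-made decode
+# function; it is called with the field's number and repeated/packed details
+# to build the actual decoder (see decoder.py).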
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. +FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py new file mode 100644 index 0000000000..b581ab750a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py @@ -0,0 +1,878 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains well known classes.
+
+This file defines well known classes which need extra maintenance including:
+  - Any
+  - Duration
+  - FieldMask
+  - Struct
+  - Timestamp
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+import calendar
+import collections.abc
+import datetime
+
+from google.protobuf.descriptor import FieldDescriptor
+
+_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
+_NANOS_PER_SECOND = 1000000000
+_NANOS_PER_MILLISECOND = 1000000
+_NANOS_PER_MICROSECOND = 1000
+_MILLIS_PER_SECOND = 1000
+_MICROS_PER_SECOND = 1000000
+_SECONDS_PER_DAY = 24 * 3600
+_DURATION_SECONDS_MAX = 315576000000
+
+
+class Any(object):
+  """Class for Any Message type."""
+
+  __slots__ = ()
+
+  def Pack(self, msg, type_url_prefix='type.googleapis.com/',
+           deterministic=None):
+    """Packs the specified message into current Any message."""
+    if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
+      self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+    else:
+      self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+    self.value = msg.SerializeToString(deterministic=deterministic)
+
+  def Unpack(self, msg):
+    """Unpacks the current Any message into specified message."""
+    descriptor = msg.DESCRIPTOR
+    if not self.Is(descriptor):
+      return False
+    msg.ParseFromString(self.value)
+    return True
+
+  def TypeName(self):
+    """Returns the protobuf type name of the inner message."""
+    # Only the last part of the type URL is used: b/25630112
+    return self.type_url.split('/')[-1]
+
+  def Is(self, descriptor):
+    """Checks if this Any represents the given protobuf type."""
+    return '/' in self.type_url and self.TypeName() == descriptor.full_name
+
+
+_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0)
+_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp(
+    0, tz=datetime.timezone.utc)
+
+
+class Timestamp(object):
+  """Class for Timestamp message type."""
+
+  __slots__ = ()
+
+  def ToJsonString(self):
+    """Converts Timestamp to RFC 3339 date string format.
+
+    Returns:
+      A string converted from timestamp. The string is always Z-normalized
+      and uses 3, 6 or 9 fractional digits as required to represent the
+      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
+    """
+    nanos = self.nanos % _NANOS_PER_SECOND
+    total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
+    seconds = total_sec % _SECONDS_PER_DAY
+    days = (total_sec - seconds) // _SECONDS_PER_DAY
+    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)
+
+    result = dt.isoformat()
+    if (nanos % 1e9) == 0:
+      # If there are 0 fractional digits, the fractional
+      # point '.' should be omitted when serializing.
+      return result + 'Z'
+    if (nanos % 1e6) == 0:
+      # Serialize 3 fractional digits.
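+      # (nanos is an exact multiple of 1e6 on this branch, so the float
+      # division below yields a whole number of milliseconds.)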
+      return result + '.%03dZ' % (nanos / 1e6)
+    if (nanos % 1e3) == 0:
+      # Serialize 6 fractional digits.
+      return result + '.%06dZ' % (nanos / 1e3)
+    # Serialize 9 fractional digits.
+    return result + '.%09dZ' % nanos
+
+  def FromJsonString(self, value):
+    """Parses an RFC 3339 date string into a Timestamp.
+
+    Args:
+      value: A date string. Any fractional digits (or none) and any offset are
+          accepted as long as they fit into nanosecond precision.
+          Example of accepted format: '1972-01-01T10:00:20.021-05:00'
+
+    Raises:
+      ValueError: On parsing problems.
+    """
+    if not isinstance(value, str):
+      raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
+    timezone_offset = value.find('Z')
+    if timezone_offset == -1:
+      timezone_offset = value.find('+')
+    if timezone_offset == -1:
+      timezone_offset = value.rfind('-')
+    if timezone_offset == -1:
+      raise ValueError(
+          'Failed to parse timestamp: missing valid timezone offset.')
+    time_value = value[0:timezone_offset]
+    # Parse datetime and nanos.
+    point_position = time_value.find('.')
+    if point_position == -1:
+      second_value = time_value
+      nano_value = ''
+    else:
+      second_value = time_value[:point_position]
+      nano_value = time_value[point_position + 1:]
+    if 't' in second_value:
+      raise ValueError(
+          'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
+          'lowercase \'t\' is not accepted'.format(second_value))
+    date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT)
+    td = date_object - datetime.datetime(1970, 1, 1)
+    seconds = td.seconds + td.days * _SECONDS_PER_DAY
+    if len(nano_value) > 9:
+      raise ValueError(
+          'Failed to parse Timestamp: nanos {0} more than '
+          '9 fractional digits.'.format(nano_value))
+    if nano_value:
+      nanos = round(float('0.' + nano_value) * 1e9)
+    else:
+      nanos = 0
+    # Parse timezone offsets.
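+    # 'Z' denotes UTC. Any explicit '+HH:MM'/'-HH:MM' offset is folded into
+    # the seconds count below, so the stored Timestamp is always UTC.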
+    if value[timezone_offset] == 'Z':
+      if len(value) != timezone_offset + 1:
+        raise ValueError('Failed to parse timestamp: invalid trailing'
+                         ' data {0}.'.format(value))
+    else:
+      timezone = value[timezone_offset:]
+      pos = timezone.find(':')
+      if pos == -1:
+        raise ValueError(
+            'Invalid timezone offset value: {0}.'.format(timezone))
+      if timezone[0] == '+':
+        seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
+      else:
+        seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
+    # Set seconds and nanos
+    self.seconds = int(seconds)
+    self.nanos = int(nanos)
+
+  def GetCurrentTime(self):
+    """Sets the Timestamp to the current UTC time."""
+    self.FromDatetime(datetime.datetime.utcnow())
+
+  def ToNanoseconds(self):
+    """Converts Timestamp to nanoseconds since epoch."""
+    return self.seconds * _NANOS_PER_SECOND + self.nanos
+
+  def ToMicroseconds(self):
+    """Converts Timestamp to microseconds since epoch."""
+    return (self.seconds * _MICROS_PER_SECOND +
+            self.nanos // _NANOS_PER_MICROSECOND)
+
+  def ToMilliseconds(self):
+    """Converts Timestamp to milliseconds since epoch."""
+    return (self.seconds * _MILLIS_PER_SECOND +
+            self.nanos // _NANOS_PER_MILLISECOND)
+
+  def ToSeconds(self):
+    """Converts Timestamp to seconds since epoch."""
+    return self.seconds
+
+  def FromNanoseconds(self, nanos):
+    """Converts nanoseconds since epoch to Timestamp."""
+    self.seconds = nanos // _NANOS_PER_SECOND
+    self.nanos = nanos % _NANOS_PER_SECOND
+
+  def FromMicroseconds(self, micros):
+    """Converts microseconds since epoch to Timestamp."""
+    self.seconds = micros // _MICROS_PER_SECOND
+    self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
+
+  def FromMilliseconds(self, millis):
+    """Converts milliseconds since epoch to Timestamp."""
+    self.seconds = millis // _MILLIS_PER_SECOND
+    self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
+
+  def FromSeconds(self, seconds):
+    """Converts seconds since epoch to Timestamp."""
+    self.seconds = seconds
+    self.nanos = 0
+
+  def ToDatetime(self, tzinfo=None):
+    """Converts Timestamp to a datetime.
+
+    Args:
+      tzinfo: A datetime.tzinfo subclass; defaults to None.
+
+    Returns:
+      If tzinfo is None, returns a timezone-naive UTC datetime (with no
+      timezone information, i.e. not aware that it's UTC).
+
+      Otherwise, returns a timezone-aware datetime in the input timezone.
+    """
+    delta = datetime.timedelta(
+        seconds=self.seconds,
+        microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND))
+    if tzinfo is None:
+      return _EPOCH_DATETIME_NAIVE + delta
+    else:
+      return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta
+
+  def FromDatetime(self, dt):
+    """Converts datetime to Timestamp.
+
+    Args:
+      dt: A datetime. If it's timezone-naive, it's assumed to be in UTC.
+    """
+    # Using this guide: http://wiki.python.org/moin/WorkingWithTime
+    # And this conversion guide: http://docs.python.org/library/time.html
+
+    # Turn the date parameter into a tuple (struct_time) that can then be
+    # manipulated into an integer value of seconds. During that conversion
+    # the source date is treated as UTC, so the correct transformation is
+    # calendar.timegm().
+    self.seconds = calendar.timegm(dt.utctimetuple())
+    self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
+
+
+class Duration(object):
+  """Class for Duration message type."""
+
+  __slots__ = ()
+
+  def ToJsonString(self):
+    """Converts Duration to string format.
+
+    Returns:
+      A string converted from self.
The string format will contain
+      3, 6, or 9 fractional digits depending on the precision required to
+      represent the exact Duration value. For example: "1s", "1.010s",
+      "1.000000100s", "-3.100s"
+    """
+    _CheckDurationValid(self.seconds, self.nanos)
+    if self.seconds < 0 or self.nanos < 0:
+      result = '-'
+      seconds = - self.seconds + int((0 - self.nanos) // 1e9)
+      nanos = (0 - self.nanos) % 1e9
+    else:
+      result = ''
+      seconds = self.seconds + int(self.nanos // 1e9)
+      nanos = self.nanos % 1e9
+    result += '%d' % seconds
+    if (nanos % 1e9) == 0:
+      # If there are 0 fractional digits, the fractional
+      # point '.' should be omitted when serializing.
+      return result + 's'
+    if (nanos % 1e6) == 0:
+      # Serialize 3 fractional digits.
+      return result + '.%03ds' % (nanos / 1e6)
+    if (nanos % 1e3) == 0:
+      # Serialize 6 fractional digits.
+      return result + '.%06ds' % (nanos / 1e3)
+    # Serialize 9 fractional digits.
+    return result + '.%09ds' % nanos
+
+  def FromJsonString(self, value):
+    """Converts a string to Duration.
+
+    Args:
+      value: A string to be converted. The string must end with 's'. Any
+          fractional digits (or none) are accepted as long as they fit into
+          nanosecond precision. For example: "1s", "1.01s", "1.0000001s",
+          "-3.100s"
+
+    Raises:
+      ValueError: On parsing problems.
+    """
+    if not isinstance(value, str):
+      raise ValueError('Duration JSON value not a string: {!r}'.format(value))
+    if len(value) < 1 or value[-1] != 's':
+      raise ValueError(
+          'Duration must end with letter "s": {0}.'.format(value))
+    try:
+      pos = value.find('.')
+      if pos == -1:
+        seconds = int(value[:-1])
+        nanos = 0
+      else:
+        seconds = int(value[:pos])
+        if value[0] == '-':
+          nanos = int(round(float('-0{0}'.format(value[pos: -1])) * 1e9))
+        else:
+          nanos = int(round(float('0{0}'.format(value[pos: -1])) * 1e9))
+      _CheckDurationValid(seconds, nanos)
+      self.seconds = seconds
+      self.nanos = nanos
+    except ValueError as e:
+      raise ValueError(
+          'Couldn\'t parse duration: {0} : {1}.'.format(value, e))
+
+  def ToNanoseconds(self):
+    """Converts a Duration to nanoseconds."""
+    return self.seconds * _NANOS_PER_SECOND + self.nanos
+
+  def ToMicroseconds(self):
+    """Converts a Duration to microseconds."""
+    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
+    return self.seconds * _MICROS_PER_SECOND + micros
+
+  def ToMilliseconds(self):
+    """Converts a Duration to milliseconds."""
+    millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
+    return self.seconds * _MILLIS_PER_SECOND + millis
+
+  def ToSeconds(self):
+    """Converts a Duration to seconds."""
+    return self.seconds
+
+  def FromNanoseconds(self, nanos):
+    """Converts nanoseconds to Duration."""
+    self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
+                            nanos % _NANOS_PER_SECOND)
+
+  def FromMicroseconds(self, micros):
+    """Converts microseconds to Duration."""
+    self._NormalizeDuration(
+        micros // _MICROS_PER_SECOND,
+        (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)
+
+  def FromMilliseconds(self, millis):
+    """Converts milliseconds to Duration."""
+    self._NormalizeDuration(
+        millis // _MILLIS_PER_SECOND,
+        (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)
+
+  def FromSeconds(self, seconds):
+    """Converts seconds to Duration."""
+    self.seconds = seconds
+    self.nanos = 0
+
+  def ToTimedelta(self):
+    """Converts Duration to timedelta."""
+    return datetime.timedelta(
+        seconds=self.seconds, microseconds=_RoundTowardZero(
+            self.nanos, _NANOS_PER_MICROSECOND))
+
+  def FromTimedelta(self, td):
+    """Converts timedelta to Duration."""
+    # (Python's timedelta normalizes so that 0 <= microseconds < 1e6 and
+    # 0 <= seconds < 86400; days carries the sign for negative deltas.)
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nanos.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _CheckDurationValid(seconds, nanos): + if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: + raise ValueError( + 'Duration is not valid: Seconds {0} must be in range ' + '[-315576000000, 315576000000].'.format(seconds)) + if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: + raise ValueError( + 'Duration is not valid: Nanos {0} must be in range ' + '[-999999999, 999999999].'.format(nanos)) + if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): + raise ValueError( + 'Duration is not valid: Sign mismatch.') + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languages, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + camelcase_paths = [] + for path in self.paths: + camelcase_paths.append(_SnakeCaseToCamelCase(path)) + return ','.join(camelcase_paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + if not isinstance(value, str): + raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) + self.Clear() + if value: + for path in value.split(','): + self.paths.append(_CamelCaseToSnakeCase(path)) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. 
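+
+    Example (illustrative): a mask with paths ["foo.bar", "foo"]
+    canonicalizes to ["foo"], since "foo" covers "foo.bar".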
+ """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. + """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain "_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. 
+ """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +def _SetStructValue(struct_value, value): + if value is None: + struct_value.null_value = 0 + elif isinstance(value, bool): + # Note: this check must come before the number check because in Python + # True and False are also considered numbers. + struct_value.bool_value = value + elif isinstance(value, str): + struct_value.string_value = value + elif isinstance(value, (int, float)): + struct_value.number_value = value + elif isinstance(value, (dict, Struct)): + struct_value.struct_value.Clear() + struct_value.struct_value.update(value) + elif isinstance(value, (list, ListValue)): + struct_value.list_value.Clear() + struct_value.list_value.extend(value) + else: + raise ValueError('Unexpected type') + + +def _GetStructValue(struct_value): + which = struct_value.WhichOneof('kind') + if which == 'struct_value': + return struct_value.struct_value + elif which == 'null_value': + return None + elif which == 'number_value': + return struct_value.number_value + elif which == 'string_value': + return struct_value.string_value + elif which == 'bool_value': + return struct_value.bool_value + elif which == 'list_value': + return struct_value.list_value + elif which is None: + raise ValueError('Value not set') + + +class Struct(object): + """Class for Struct message type.""" + + __slots__ = () + + def __getitem__(self, key): + return _GetStructValue(self.fields[key]) + + def __contains__(self, item): + return item in self.fields + + def __setitem__(self, key, value): + _SetStructValue(self.fields[key], value) + + def __delitem__(self, key): + del self.fields[key] + + def __len__(self): + return len(self.fields) + + def __iter__(self): + return iter(self.fields) + + def keys(self): # pylint: disable=invalid-name + return self.fields.keys() + + def values(self): # pylint: disable=invalid-name + return [self[key] for key in self] + + def items(self): # pylint: disable=invalid-name + return [(key, self[key]) for key in self] + + def get_or_create_list(self, key): + """Returns a list for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('list_value'): + # Clear will mark list_value modified which will indeed create a list. 
+ self.fields[key].list_value.Clear() + return self.fields[key].list_value + + def get_or_create_struct(self, key): + """Returns a struct for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('struct_value'): + # Clear will mark struct_value modified which will indeed create a struct. + self.fields[key].struct_value.Clear() + return self.fields[key].struct_value + + def update(self, dictionary): # pylint: disable=invalid-name + for key, value in dictionary.items(): + _SetStructValue(self.fields[key], value) + +collections.abc.MutableMapping.register(Struct) + + +class ListValue(object): + """Class for ListValue message type.""" + + __slots__ = () + + def __len__(self): + return len(self.values) + + def append(self, value): + _SetStructValue(self.values.add(), value) + + def extend(self, elem_seq): + for value in elem_seq: + self.append(value) + + def __getitem__(self, index): + """Retrieves item by the specified index.""" + return _GetStructValue(self.values.__getitem__(index)) + + def __setitem__(self, index, value): + _SetStructValue(self.values.__getitem__(index), value) + + def __delitem__(self, key): + del self.values[key] + + def items(self): + for i in range(len(self)): + yield self[i] + + def add_struct(self): + """Appends and returns a struct value as the next value in the list.""" + struct_value = self.values.add().struct_value + # Clear will mark struct_value modified which will indeed create a struct. + struct_value.Clear() + return struct_value + + def add_list(self): + """Appends and returns a list value as the next value in the list.""" + list_value = self.values.add().list_value + # Clear will mark list_value modified which will indeed create a list. + list_value.Clear() + return list_value + +collections.abc.MutableSequence.register(ListValue) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.ListValue': ListValue, + 'google.protobuf.Struct': Struct, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py new file mode 100644 index 0000000000..883f525585 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from google.protobuf import descriptor
+from google.protobuf import message
+
+
+TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+  raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+  raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+  """Returns an unsigned 32-bit integer that encodes the field number and
+  wire type information in standard protocol message wire format.
+
+  Args:
+    field_number: Expected to be an integer in the range [1, 1 << 29)
+    wire_type: One of the WIRETYPE_* constants.
+  """
+  if not 0 <= wire_type <= _WIRETYPE_MAX:
+    raise message.EncodeError('Unknown wire type: %d' % wire_type)
+  return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+  """The inverse of PackTag().  Given an unsigned 32-bit number,
+  returns a (field_number, wire_type) tuple.
+  """
+  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
+
+
+def ZigZagEncode(value):
+  """ZigZag Transform: Encodes signed integers so that they can be
+  effectively used with varint encoding. See wire_format.h for
+  more details.
+  """
+  if value >= 0:
+    return value << 1
+  return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+  """Inverse of ZigZagEncode()."""
+  if not value & 0x1:
+    return value >> 1
+  return (value >> 1) ^ (~0)
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+  return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+  # Have to convert to uint before calling UInt64ByteSize().
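A quick sanity check of the tag and ZigZag helpers above (illustrative only; upstream, this module lives at google.protobuf.internal.wire_format):

    from google.protobuf.internal import wire_format as wf

    tag = wf.PackTag(field_number=5, wire_type=wf.WIRETYPE_VARINT)
    assert tag == (5 << 3) | 0
    assert wf.UnpackTag(tag) == (5, wf.WIRETYPE_VARINT)

    # ZigZag interleaves signed values: 0, -1, 1, -2 map to 0, 1, 2, 3
    assert [wf.ZigZagEncode(v) for v in (0, -1, 1, -2)] == [0, 1, 2, 3]
    assert wf.ZigZagDecode(wf.ZigZagEncode(-42)) == -42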
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) + + +def UInt32ByteSize(field_number, uint32): + return UInt64ByteSize(field_number, uint32) + + +def UInt64ByteSize(field_number, uint64): + return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) + + +def SInt32ByteSize(field_number, int32): + return UInt32ByteSize(field_number, ZigZagEncode(int32)) + + +def SInt64ByteSize(field_number, int64): + return UInt64ByteSize(field_number, ZigZagEncode(int64)) + + +def Fixed32ByteSize(field_number, fixed32): + return TagByteSize(field_number) + 4 + + +def Fixed64ByteSize(field_number, fixed64): + return TagByteSize(field_number) + 8 + + +def SFixed32ByteSize(field_number, sfixed32): + return TagByteSize(field_number) + 4 + + +def SFixed64ByteSize(field_number, sfixed64): + return TagByteSize(field_number) + 8 + + +def FloatByteSize(field_number, flt): + return TagByteSize(field_number) + 4 + + +def DoubleByteSize(field_number, double): + return TagByteSize(field_number) + 8 + + +def BoolByteSize(field_number, b): + return TagByteSize(field_number) + 1 + + +def EnumByteSize(field_number, enum): + return UInt32ByteSize(field_number, enum) + + +def StringByteSize(field_number, string): + return BytesByteSize(field_number, string.encode('utf-8')) + + +def BytesByteSize(field_number, b): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(len(b)) + + len(b)) + + +def GroupByteSize(field_number, message): + return (2 * TagByteSize(field_number) # START and END group. + + message.ByteSize()) + + +def MessageByteSize(field_number, message): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(message.ByteSize()) + + message.ByteSize()) + + +def MessageSetItemByteSize(field_number, msg): + # First compute the sizes of the tags. + # There are 2 tags for the beginning and ending of the repeated group, that + # is field number 1, one with field number 2 (type_id) and one with field + # number 3 (message). + total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) + + # Add the number of bytes for type_id. + total_size += _VarUInt64ByteSizeNoTag(field_number) + + message_size = msg.ByteSize() + + # The number of bytes for encoding the length of the message. + total_size += _VarUInt64ByteSizeNoTag(message_size) + + # The size of the message. + total_size += message_size + return total_size + + +def TagByteSize(field_number): + """Returns the bytes required to serialize a tag with this field number.""" + # Just pass in type 0, since the type won't affect the tag+type size. + return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) + + +# Private helper function for the *ByteSize() functions above. + +def _VarUInt64ByteSizeNoTag(uint64): + """Returns the number of bytes required to serialize a single varint + using boundary value comparisons. (unrolled loop optimization -WPierce) + uint64 must be unsigned. 
+ """ + if uint64 <= 0x7f: return 1 + if uint64 <= 0x3fff: return 2 + if uint64 <= 0x1fffff: return 3 + if uint64 <= 0xfffffff: return 4 + if uint64 <= 0x7ffffffff: return 5 + if uint64 <= 0x3ffffffffff: return 6 + if uint64 <= 0x1ffffffffffff: return 7 + if uint64 <= 0xffffffffffffff: return 8 + if uint64 <= 0x7fffffffffffffff: return 9 + if uint64 > UINT64_MAX: + raise message.EncodeError('Value out of range: %d' % uint64) + return 10 + + +NON_PACKABLE_TYPES = ( + descriptor.FieldDescriptor.TYPE_STRING, + descriptor.FieldDescriptor.TYPE_GROUP, + descriptor.FieldDescriptor.TYPE_MESSAGE, + descriptor.FieldDescriptor.TYPE_BYTES +) + + +def IsTypePackable(field_type): + """Return true iff packable = true is valid for fields of this type. + + Args: + field_type: a FieldDescriptor::Type value. + + Returns: + True iff fields of this type are packable. + """ + return field_type not in NON_PACKABLE_TYPES diff --git a/openpype/hosts/nuke/vendor/google/protobuf/json_format.py b/openpype/hosts/nuke/vendor/google/protobuf/json_format.py new file mode 100644 index 0000000000..5024ed89d7 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/json_format.py @@ -0,0 +1,912 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in JSON format. + +Simple usage example: + + # Create a proto object and serialize it to a json format string. + message = my_proto_pb2.MyMessage(foo='bar') + json_string = json_format.MessageToJson(message) + + # Parse a json format string to proto object. 
+  message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+
+import base64
+from collections import OrderedDict
+import json
+import math
+from operator import methodcaller
+import re
+import sys
+
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import symbol_database
+
+
+_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
+_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
+                        descriptor.FieldDescriptor.CPPTYPE_UINT32,
+                        descriptor.FieldDescriptor.CPPTYPE_INT64,
+                        descriptor.FieldDescriptor.CPPTYPE_UINT64])
+_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
+                          descriptor.FieldDescriptor.CPPTYPE_UINT64])
+_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
+                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
+_INFINITY = 'Infinity'
+_NEG_INFINITY = '-Infinity'
+_NAN = 'NaN'
+
+_UNPAIRED_SURROGATE_PATTERN = re.compile(
+    u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]')
+
+_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
+
+
+class Error(Exception):
+  """Top-level module error for json_format."""
+
+
+class ParseError(Error):
+  """Thrown in case of parsing error."""
+
+
+def _IsMapEntry(field):
+  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+          field.message_type.has_options and
+          field.message_type.GetOptions().map_entry)
+
+
+def _IsWrapperMessage(message_descriptor):
+  return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
+
+
+def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
+  """Creates a message from a type URL."""
+  db = symbol_database.Default()
+  pool = db.pool if descriptor_pool is None else descriptor_pool
+  type_name = type_url.split('/')[-1]
+  try:
+    message_descriptor = pool.FindMessageTypeByName(type_name)
+  except KeyError:
+    raise TypeError(
+        'Can not find message descriptor by type_url: {0}'.format(type_url))
+  message_class = db.GetPrototype(message_descriptor)
+  return message_class()
+
+
+_INT_OR_FLOAT = (int, float)
+
+
+class _Parser(object):
+  """JSON format parser for protocol message."""
+
+  def __init__(self, ignore_unknown_fields, descriptor_pool,
+               max_recursion_depth):
+    self.ignore_unknown_fields = ignore_unknown_fields
+    self.descriptor_pool = descriptor_pool
+    self.max_recursion_depth = max_recursion_depth
+    self.recursion_depth = 0
+
+  def ConvertMessage(self, value, message, path):
+    """Convert a JSON object into a message.
+
+    Args:
+      value: A JSON object.
+      message: A WKT or regular message to record the data.
+      path: parent path to log parse error info.
+
+    Raises:
+      ParseError: In case of convert problems.
+    """
+    self.recursion_depth += 1
+    if self.recursion_depth > self.max_recursion_depth:
+      raise ParseError('Message too deep. Max recursion depth is {0}'.format(
+          self.max_recursion_depth))
+    message_descriptor = message.DESCRIPTOR
+    full_name = message_descriptor.full_name
+    if not path:
+      path = message_descriptor.name
+    if _IsWrapperMessage(message_descriptor):
+      self._ConvertWrapperMessage(value, message, path)
+    elif full_name in _WKTJSONMETHODS:
+      methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self)
+    else:
+      self._ConvertFieldValuePair(value, message, path)
+    self.recursion_depth -= 1
+
+  def _ConvertFieldValuePair(self, js, message, path):
+    """Convert field value pairs into regular message.
+
+    Args:
+      js: A JSON object to convert the field value pairs.
+      message: A regular protocol message to record the data.
+      path: parent path to log parse error info.
+
+    Raises:
+      ParseError: In case of problems converting.
+    """
+    names = []
+    message_descriptor = message.DESCRIPTOR
+    fields_by_json_name = dict((f.json_name, f)
+                               for f in message_descriptor.fields)
+    for name in js:
+      try:
+        field = fields_by_json_name.get(name, None)
+        if not field:
+          field = message_descriptor.fields_by_name.get(name, None)
+        if not field and _VALID_EXTENSION_NAME.match(name):
+          if not message_descriptor.is_extendable:
+            raise ParseError(
+                'Message type {0} does not have extensions at {1}'.format(
+                    message_descriptor.full_name, path))
+          identifier = name[1:-1]  # strip [] brackets
+          # pylint: disable=protected-access
+          field = message.Extensions._FindExtensionByName(identifier)
+          # pylint: enable=protected-access
+          if not field:
+            # Try looking for extension by the message type name, dropping the
+            # field name following the final . separator in full_name.
+            identifier = '.'.join(identifier.split('.')[:-1])
+            # pylint: disable=protected-access
+            field = message.Extensions._FindExtensionByName(identifier)
+            # pylint: enable=protected-access
+        if not field:
+          if self.ignore_unknown_fields:
+            continue
+          raise ParseError(
+              ('Message type "{0}" has no field named "{1}" at "{2}".\n'
+               ' Available Fields(except extensions): "{3}"').format(
+                   message_descriptor.full_name, name, path,
+                   [f.json_name for f in message_descriptor.fields]))
+        if name in names:
+          raise ParseError('Message type "{0}" should not have multiple '
+                           '"{1}" fields at "{2}".'.format(
+                               message.DESCRIPTOR.full_name, name, path))
+        names.append(name)
+        value = js[name]
+        # Check no other oneof field is parsed.
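For context, the public entry points of this module round-trip between JSON text and messages. A small usage sketch (illustrative only; Struct is used here because it needs no generated code):

    from google.protobuf import json_format
    from google.protobuf import struct_pb2

    msg = struct_pb2.Struct()
    json_format.Parse('{"name": "nuke", "count": 2}', msg)
    as_json = json_format.MessageToJson(msg)
    same = json_format.Parse(as_json, struct_pb2.Struct())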
+ if field.containing_oneof is not None and value is not None: + oneof_name = field.containing_oneof.name + if oneof_name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" oneof fields at "{2}".'.format( + message.DESCRIPTOR.full_name, oneof_name, + path)) + names.append(oneof_name) + + if value is None: + if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.message_type.full_name == 'google.protobuf.Value'): + sub_message = getattr(message, field.name) + sub_message.null_value = 0 + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM + and field.enum_type.full_name == 'google.protobuf.NullValue'): + setattr(message, field.name, 0) + else: + message.ClearField(field.name) + continue + + # Parse field value. + if _IsMapEntry(field): + message.ClearField(field.name) + self._ConvertMapFieldValue(value, message, field, + '{0}.{1}'.format(path, name)) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + message.ClearField(field.name) + if not isinstance(value, list): + raise ParseError('repeated field {0} must be in [] which is ' + '{1} at {2}'.format(name, value, path)) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # Repeated message field. + for index, item in enumerate(value): + sub_message = getattr(message, field.name).add() + # None is a null_value in Value. + if (item is None and + sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + self.ConvertMessage(item, sub_message, + '{0}.{1}[{2}]'.format(path, name, index)) + else: + # Repeated scalar field. + for index, item in enumerate(value): + if item is None: + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + getattr(message, field.name).append( + _ConvertScalarFieldValue( + item, field, '{0}.{1}[{2}]'.format(path, name, index))) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) + else: + if field.is_extension: + message.Extensions[field] = _ConvertScalarFieldValue( + value, field, '{0}.{1}'.format(path, name)) + else: + setattr( + message, field.name, + _ConvertScalarFieldValue(value, field, + '{0}.{1}'.format(path, name))) + except ParseError as e: + if field and field.containing_oneof is None: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + else: + raise ParseError(str(e)) + except ValueError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + except TypeError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + + def _ConvertAnyMessage(self, value, message, path): + """Convert a JSON representation into Any message.""" + if isinstance(value, dict) and not value: + return + try: + type_url = value['@type'] + except KeyError: + raise ParseError( + '@type is missing when parsing any message at {0}'.format(path)) + + try: + sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) + except TypeError as e: + raise ParseError('{0} at {1}'.format(e, path)) + message_descriptor = sub_message.DESCRIPTOR + full_name = message_descriptor.full_name + if 
_IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value['value'], sub_message, + '{0}.value'.format(path)) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, + '{0}.value'.format(path))( + self) + else: + del value['@type'] + self._ConvertFieldValuePair(value, sub_message, path) + value['@type'] = type_url + # Sets Any message + message.value = sub_message.SerializeToString() + message.type_url = type_url + + def _ConvertGenericMessage(self, value, message, path): + """Convert a JSON representation into message with FromJsonString.""" + # Duration, Timestamp, FieldMask have a FromJsonString method to do the + # conversion. Users can also call the method directly. + try: + message.FromJsonString(value) + except ValueError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + def _ConvertValueMessage(self, value, message, path): + """Convert a JSON representation into Value message.""" + if isinstance(value, dict): + self._ConvertStructMessage(value, message.struct_value, path) + elif isinstance(value, list): + self._ConvertListValueMessage(value, message.list_value, path) + elif value is None: + message.null_value = 0 + elif isinstance(value, bool): + message.bool_value = value + elif isinstance(value, str): + message.string_value = value + elif isinstance(value, _INT_OR_FLOAT): + message.number_value = value + else: + raise ParseError('Value {0} has unexpected type {1} at {2}'.format( + value, type(value), path)) + + def _ConvertListValueMessage(self, value, message, path): + """Convert a JSON representation into ListValue message.""" + if not isinstance(value, list): + raise ParseError('ListValue must be in [] which is {0} at {1}'.format( + value, path)) + message.ClearField('values') + for index, item in enumerate(value): + self._ConvertValueMessage(item, message.values.add(), + '{0}[{1}]'.format(path, index)) + + def _ConvertStructMessage(self, value, message, path): + """Convert a JSON representation into Struct message.""" + if not isinstance(value, dict): + raise ParseError('Struct must be in a dict which is {0} at {1}'.format( + value, path)) + # Clear will mark the struct as modified so it will be created even if + # there are no values. + message.Clear() + for key in value: + self._ConvertValueMessage(value[key], message.fields[key], + '{0}.{1}'.format(path, key)) + return + + def _ConvertWrapperMessage(self, value, message, path): + """Convert a JSON representation into Wrapper message.""" + field = message.DESCRIPTOR.fields_by_name['value'] + setattr( + message, 'value', + _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) + + def _ConvertMapFieldValue(self, value, message, field, path): + """Convert map field value for a message map field. + + Args: + value: A JSON object to convert the map field value. + message: A protocol message to record the converted data. + field: The descriptor of the map field to be converted. + path: parent path to log parse error info. + + Raises: + ParseError: In case of convert problems. 
+ """ + if not isinstance(value, dict): + raise ParseError( + 'Map field {0} must be in a dict which is {1} at {2}'.format( + field.name, value, path)) + key_field = field.message_type.fields_by_name['key'] + value_field = field.message_type.fields_by_name['value'] + for key in value: + key_value = _ConvertScalarFieldValue(key, key_field, + '{0}.key'.format(path), True) + if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self.ConvertMessage(value[key], + getattr(message, field.name)[key_value], + '{0}[{1}]'.format(path, key_value)) + else: + getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( + value[key], value_field, path='{0}[{1}]'.format(path, key_value)) + + +def _ConvertScalarFieldValue(value, field, path, require_str=False): + """Convert a single scalar field value. + + Args: + value: A scalar value to convert the scalar field value. + field: The descriptor of the field to convert. + path: parent path to log parse error info. + require_str: If True, the field value must be a str. + + Returns: + The converted scalar field value + + Raises: + ParseError: In case of convert problems. + """ + try: + if field.cpp_type in _INT_TYPES: + return _ConvertInteger(value) + elif field.cpp_type in _FLOAT_TYPES: + return _ConvertFloat(value, field) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + return _ConvertBool(value, require_str) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + if isinstance(value, str): + encoded = value.encode('utf-8') + else: + encoded = value + # Add extra padding '=' + padded_value = encoded + b'=' * (4 - len(encoded) % 4) + return base64.urlsafe_b64decode(padded_value) + else: + # Checking for unpaired surrogates appears to be unreliable, + # depending on the specific Python version, so we check manually. + if _UNPAIRED_SURROGATE_PATTERN.search(value): + raise ParseError('Unpaired surrogate') + return value + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + # Convert an enum value. + enum_value = field.enum_type.values_by_name.get(value, None) + if enum_value is None: + try: + number = int(value) + enum_value = field.enum_type.values_by_number.get(number, None) + except ValueError: + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + if enum_value is None: + if field.file.syntax == 'proto3': + # Proto3 accepts unknown enums. + return number + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + return enum_value.number + except ParseError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + +def _ConvertInteger(value): + """Convert an integer. + + Args: + value: A scalar value to convert. + + Returns: + The integer value. + + Raises: + ParseError: If an integer couldn't be consumed. 
+  """
+  if isinstance(value, float) and not value.is_integer():
+    raise ParseError('Couldn\'t parse integer: {0}'.format(value))
+
+  if isinstance(value, str) and value.find(' ') != -1:
+    raise ParseError('Couldn\'t parse integer: "{0}"'.format(value))
+
+  if isinstance(value, bool):
+    raise ParseError('Bool value {0} is not acceptable for '
+                     'integer field'.format(value))
+
+  return int(value)
+
+
+def _ConvertFloat(value, field):
+  """Convert a floating point number."""
+  if isinstance(value, float):
+    if math.isnan(value):
+      raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead')
+    if math.isinf(value):
+      if value > 0:
+        raise ParseError('Couldn\'t parse Infinity or value too large, '
+                         'use quoted "Infinity" instead')
+      else:
+        raise ParseError('Couldn\'t parse -Infinity or value too small, '
+                         'use quoted "-Infinity" instead')
+    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
+      # pylint: disable=protected-access
+      if value > type_checkers._FLOAT_MAX:
+        raise ParseError('Float value too large')
+      # pylint: disable=protected-access
+      if value < type_checkers._FLOAT_MIN:
+        raise ParseError('Float value too small')
+  if value == 'nan':
+    raise ParseError('Couldn\'t parse float "nan", use "NaN" instead')
+  try:
+    # Assume Python compatible syntax.
+    return float(value)
+  except ValueError:
+    # Check alternative spellings.
+    if value == _NEG_INFINITY:
+      return float('-inf')
+    elif value == _INFINITY:
+      return float('inf')
+    elif value == _NAN:
+      return float('nan')
+    else:
+      raise ParseError('Couldn\'t parse float: {0}'.format(value))
+
+
+def _ConvertBool(value, require_str):
+  """Convert a boolean value.
+
+  Args:
+    value: A scalar value to convert.
+    require_str: If True, value must be a str.
+
+  Returns:
+    The bool parsed.
+
+  Raises:
+    ParseError: If a boolean value couldn't be consumed.
+  """
+  if require_str:
+    if value == 'true':
+      return True
+    elif value == 'false':
+      return False
+    else:
+      raise ParseError('Expected "true" or "false", not {0}'.format(value))
+
+  if not isinstance(value, bool):
+    raise ParseError('Expected true or false without quotes')
+  return value
+
+_WKTJSONMETHODS = {
+    'google.protobuf.Any': ['_AnyMessageToJsonObject',
+                            '_ConvertAnyMessage'],
+    'google.protobuf.Duration': ['_GenericMessageToJsonObject',
+                                 '_ConvertGenericMessage'],
+    'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
+                                  '_ConvertGenericMessage'],
+    'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
+                                  '_ConvertListValueMessage'],
+    'google.protobuf.Struct': ['_StructMessageToJsonObject',
+                               '_ConvertStructMessage'],
+    'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
+                                  '_ConvertGenericMessage'],
+    'google.protobuf.Value': ['_ValueMessageToJsonObject',
+                              '_ConvertValueMessage']
+}
diff --git a/openpype/hosts/nuke/vendor/google/protobuf/message.py b/openpype/hosts/nuke/vendor/google/protobuf/message.py
new file mode 100644
index 0000000000..76c6802f70
--- /dev/null
+++ b/openpype/hosts/nuke/vendor/google/protobuf/message.py
@@ -0,0 +1,424 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): + """Base error type for this module.""" + pass + + +class DecodeError(Error): + """Exception raised when deserializing messages.""" + pass + + +class EncodeError(Error): + """Exception raised when serializing messages.""" + pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + """ + + # TODO(robinson): Link to an HTML document here. + + # TODO(robinson): Document that instances of this class will also + # have an Extensions attribute with __getitem__ and __setitem__. + # Again, not sure how to best convey this. + + # TODO(robinson): Document that the class must also have a static + # RegisterExtension(extension_field) method. + # Not sure how to best express at this point. + + # TODO(robinson): Document these fields and methods. + + __slots__ = [] + + #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. 
Singular sub-messages and groups are recursively merged. + + Args: + other_msg (Message): A message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg (Message): A message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. If you find yourself using this, + you may want to reconsider your design. + """ + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + bool: The method returns True if the message is initialized (i.e. all of + its required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in `serialized` that is already present + in this message: + + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + Args: + serialized (bytes): Any object that allows us to call + ``memoryview(serialized)`` to access a string of bytes using the + buffer interface. + + Returns: + int: The number of bytes read from `serialized`. + For non-group messages, this will always be `len(serialized)`, + but for messages which are actually groups, this will + generally be less than `len(serialized)`, since we must + stop when we reach an ``END_GROUP`` tag. Note that if + we *do* stop because of an ``END_GROUP`` tag, the number + of bytes returned does not include the bytes + for the ``END_GROUP`` tag information. + + Raises: + DecodeError: if the input cannot be parsed. + """ + # TODO(robinson): Document handling of unknown fields. + # TODO(robinson): When we switch to a helper, this will return None. + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like :func:`MergeFromString()`, except we clear the object first. + + Raises: + message.DecodeError if the input cannot be parsed. + """ + self.Clear() + return self.MergeFromString(serialized) + + def SerializeToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
+ """ + raise NotImplementedError + + def SerializePartialToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + bytes: A serialized representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for present fields. + + A message field is non-empty if HasField() would return true. A singular + primitive field is non-empty if HasField() would return true in proto2 or it + is non zero in proto3. A repeated field is non-empty if it contains at least + one element. The fields are ordered by field number. + + Returns: + list[tuple(FieldDescriptor, value)]: field descriptors and values + for all fields in the message which are not empty. The values vary by + field type. + """ + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message. + + For a oneof group, checks if any field inside is set. Note that if the + field_name is not defined in the message descriptor, :exc:`ValueError` will + be raised. + + Args: + field_name (str): The name of the field to check for presence. + + Returns: + bool: Whether a value has been set for the named field. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field. + + Inside a oneof group, clears the field set. If the name neither refers to a + defined field or oneof group, :exc:`ValueError` is raised. + + Args: + field_name (str): The name of the field to check for presence. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group. + + If no field is set, returns None. + + Args: + oneof_group (str): the name of the oneof group to check. + + Returns: + str or None: The name of the group that is set, or None. + + Raises: + ValueError: no group with the given name exists + """ + raise NotImplementedError + + def HasExtension(self, extension_handle): + """Checks if a certain extension is present for this message. + + Extensions are retrieved using the :attr:`Extensions` mapping (if present). + + Args: + extension_handle: The handle for the extension to check. + + Returns: + bool: Whether the extension is present for this message. + + Raises: + KeyError: if the extension is repeated. 
Similar to repeated fields, + there is no separate notion of presence: a "not present" repeated + extension is an empty list. + """ + raise NotImplementedError + + def ClearExtension(self, extension_handle): + """Clears the contents of a given extension. + + Args: + extension_handle: The handle for the extension to clear. + """ + raise NotImplementedError + + def UnknownFields(self): + """Returns the UnknownFieldSet. + + Returns: + UnknownFieldSet: The unknown fields stored in this message. + """ + raise NotImplementedError + + def DiscardUnknownFields(self): + """Clears all fields in the :class:`UnknownFieldSet`. + + This operation is recursive for nested message. + """ + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + + Recursively calls ByteSize() on all contained messages. + + Returns: + int: The number of bytes required to serialize this message. + """ + raise NotImplementedError + + @classmethod + def FromString(cls, s): + raise NotImplementedError + + @staticmethod + def RegisterExtension(extension_handle): + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. + """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + serialized = state['serialized'] + # On Python 3, using encoding='latin1' is required for unpickling + # protos pickled by Python 2. + if not isinstance(serialized, bytes): + serialized = serialized.encode('latin1') + self.ParseFromString(serialized) + + def __reduce__(self): + message_descriptor = self.DESCRIPTOR + if message_descriptor.containing_type is None: + return type(self), (), self.__getstate__() + # the message type must be nested. + # Python does not pickle nested classes; use the symbol_database on the + # receiving end. + container = message_descriptor + return (_InternalConstructMessage, (container.full_name,), + self.__getstate__()) + + +def _InternalConstructMessage(full_name): + """Constructs a nested message.""" + from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top + + return symbol_database.Default().GetSymbol(full_name)() diff --git a/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py b/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py new file mode 100644 index 0000000000..3656fa6874 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + + +# The type of all Message classes. +_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Obtains a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + if descriptor not in self._classes: + result_class = self.CreatePrototype(descriptor) + # The assignment to _classes is redundant for the base implementation, but + # might avoid confusion in cases where CreatePrototype gets overridden and + # does not call the base implementation. 
+      self._classes[descriptor] = result_class
+      return result_class
+    return self._classes[descriptor]
+
+  def CreatePrototype(self, descriptor):
+    """Builds a proto2 message class based on the passed in descriptor.
+
+    Don't call this function directly; it always creates a new class. Call
+    GetPrototype() instead. This method is meant to be overridden in subclasses
+    to perform additional operations on the newly constructed class.
+
+    Args:
+      descriptor: The descriptor to build from.
+
+    Returns:
+      A class describing the passed in descriptor.
+    """
+    descriptor_name = descriptor.name
+    result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
+        descriptor_name,
+        (message.Message,),
+        {
+            'DESCRIPTOR': descriptor,
+            # If module not set, it wrongly points to message_factory module.
+            '__module__': None,
+        })
+    result_class._FACTORY = self  # pylint: disable=protected-access
+    # Assign in _classes before doing recursive calls to avoid infinite
+    # recursion.
+    self._classes[descriptor] = result_class
+    for field in descriptor.fields:
+      if field.message_type:
+        self.GetPrototype(field.message_type)
+    for extension in result_class.DESCRIPTOR.extensions:
+      if extension.containing_type not in self._classes:
+        self.GetPrototype(extension.containing_type)
+      extended_class = self._classes[extension.containing_type]
+      extended_class.RegisterExtension(extension)
+    return result_class
+
+  def GetMessages(self, files):
+    """Gets all the messages from a specified file.
+
+    This will find and resolve dependencies, failing if the descriptor
+    pool cannot satisfy them.
+
+    Args:
+      files: The file names to extract messages from.
+
+    Returns:
+      A dictionary mapping proto names to the message classes. This will include
+      any dependent messages as well as any messages defined in the same file as
+      a specified message.
+    """
+    result = {}
+    for file_name in files:
+      file_desc = self.pool.FindFileByName(file_name)
+      for desc in file_desc.message_types_by_name.values():
+        result[desc.full_name] = self.GetPrototype(desc)
+
+      # While the extension FieldDescriptors are created by the descriptor pool,
+      # the python classes created in the factory need them to be registered
+      # explicitly, which is done below.
+      #
+      # The call to RegisterExtension will specifically check if the
+      # extension was already registered on the object and either
+      # ignore the registration if the original was the same, or raise
+      # an error if they were different.
+
+      for extension in file_desc.extensions_by_name.values():
+        if extension.containing_type not in self._classes:
+          self.GetPrototype(extension.containing_type)
+        extended_class = self._classes[extension.containing_type]
+        extended_class.RegisterExtension(extension)
+    return result
+
+
+_FACTORY = MessageFactory()
+
+
+def GetMessages(file_protos):
+  """Builds a dictionary of all the messages available in a set of files.
+
+  Args:
+    file_protos: Iterable of FileDescriptorProto to build messages out of.
+
+  Returns:
+    A dictionary mapping proto names to the message classes. This will include
+    any dependent messages as well as any messages defined in the same file as
+    a specified message.
+  """
+  # The cpp implementation of the protocol buffer library requires adding the
+  # messages in topological order of the dependency graph.
+  file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
+  def _AddFile(file_proto):
+    for dependency in file_proto.dependency:
+      if dependency in file_by_name:
+        # Remove from elements to be visited, in order to cut cycles.
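The module-level GetMessages above turns raw FileDescriptorProtos into ready-to-use classes. A sketch of typical use (illustrative; all names below are made up):

    from google.protobuf import descriptor_pb2
    from google.protobuf import message_factory

    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example.proto'   # hypothetical file
    file_proto.package = 'example'
    msg = file_proto.message_type.add()
    msg.name = 'Ping'
    f = msg.field.add()
    f.name = 'id'
    f.number = 1
    f.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
    f.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    classes = message_factory.GetMessages([file_proto])
    ping = classes['example.Ping'](id=7)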
+ _AddFile(file_by_name.pop(dependency)) + _FACTORY.pool.Add(file_proto) + while file_by_name: + _AddFile(file_by_name.popitem()[1]) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py b/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py new file mode 100644 index 0000000000..a4667ce63e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py @@ -0,0 +1,134 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +from collections import OrderedDict +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! + + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. 
+ """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. + factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + # # If the number falls in the reserved range, reassign it to the correct + # # number after the range. + if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: + f_number += ( + descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - + descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py new file mode 100644 index 0000000000..fc8eb32d79 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
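MakeSimpleProtoClass above can be exercised like this (illustrative sketch; the full_name is made up, and field types come from descriptor_pb2):

    from collections import OrderedDict
    from google.protobuf import descriptor_pb2
    from google.protobuf import proto_builder

    RecordCls = proto_builder.MakeSimpleProtoClass(
        OrderedDict([
            ('name', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
            ('count', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
        ]),
        full_name='example.AdHocRecord')  # hypothetical name
    rec = RecordCls(name='x', count=3)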
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. +""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py new file mode 100644 index 0000000000..2c6ecf4c98 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/pyext/python.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestAllExtensions.RegisterExtension(optional_nested_message_extension) + TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\001' + _TESTALLTYPES._serialized_start=72 + _TESTALLTYPES._serialized_end=388 + _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 + _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 + _FOREIGNMESSAGE._serialized_start=390 + _FOREIGNMESSAGE._serialized_end=428 + _TESTALLEXTENSIONS._serialized_start=430 + _TESTALLEXTENSIONS._serialized_end=459 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/reflection.py b/openpype/hosts/nuke/vendor/google/protobuf/reflection.py new file mode 100644 index 0000000000..81e18859a8 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/reflection.py @@ -0,0 +1,95 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf import message_factory +from google.protobuf import symbol_database + +# The type of all Message classes. +# Part of the public interface, but normally only used by message factories. +GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE + +MESSAGE_CLASS_CACHE = {} + + +# Deprecated. Please NEVER use reflection.ParseMessage(). +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). + Please use MessageFactory.GetPrototype() instead. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +# Deprecated. Please NEVER use reflection.MakeClass(). +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + DEPRECATED: use MessageFactory.GetPrototype() instead. + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + # Original implementation leads to duplicate message classes, which won't play + # well with extensions. Message factory info is also missing. + # Redirect to message_factory. + return symbol_database.Default().GetPrototype(descriptor) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/service.py b/openpype/hosts/nuke/vendor/google/protobuf/service.py new file mode 100644 index 0000000000..5625246324 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/service.py @@ -0,0 +1,228 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
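Since both helpers above (`ParseMessage` and `MakeClass`) are deprecated in favor of the message factory, here is a minimal sketch of the suggested replacement path; `source_context_pb2` is reused purely for illustration.

  from google.protobuf import symbol_database
  from google.protobuf import source_context_pb2

  payload = source_context_pb2.SourceContext(
      file_name='example.proto').SerializeToString()

  # Rough equivalent of the deprecated reflection.ParseMessage(desc, payload):
  cls = symbol_database.Default().GetPrototype(
      source_context_pb2.SourceContext.DESCRIPTOR)
  message = cls()
  message.ParseFromString(payload)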
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. 
For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. + + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. + + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. 
+ + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py b/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py new file mode 100644 index 0000000000..f82ab7145a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py @@ -0,0 +1,295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example:: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + # ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. + + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + cls.DESCRIPTOR = descriptor + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. 
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. + + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(builder, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + # Making sure to use exact argument names from the abstract interface in + # service.py to match the type signature + def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): + return builder._CallMethod(self, method_descriptor, rpc_controller, + request, done) + + def _WrapGetRequestClass(self, method_descriptor): + return builder._GetRequestClass(method_descriptor) + + def _WrapGetResponseClass(self, method_descriptor): + return builder._GetResponseClass(method_descriptor) + + builder.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: builder.descriptor) + cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' + cls.GetRequestClass = _WrapGetRequestClass + cls.GetResponseClass = _WrapGetResponseClass + for method in builder.descriptor.methods: + setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. 
+ + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. + callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). + """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py new file mode 100644 index 0000000000..30cca2e06e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
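The Service/RpcChannel contract spelled out in service.py, together with the builders above, can be exercised with a toy in-process channel. A minimal sketch; `LoopbackChannel` is an illustrative name, not part of the vendored API.

  from google.protobuf import service

  class LoopbackChannel(service.RpcChannel):
    """Toy channel that dispatches straight to a local service object."""

    def __init__(self, local_service):
      self._service = local_service

    def CallMethod(self, method_descriptor, rpc_controller,
                   request, response_class, done):
      # A real channel would serialize `request`, send it over the wire and
      # decode the reply into `response_class`; here we simply dispatch to
      # the in-process implementation, matching Service.CallMethod().
      return self._service.CallMethod(
          method_descriptor, rpc_controller, request, done)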
+# source: google/protobuf/source_context.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SOURCECONTEXT._serialized_start=57 + _SOURCECONTEXT._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py new file mode 100644 index 0000000000..149728ca08 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _STRUCT_FIELDSENTRY._options = None + _STRUCT_FIELDSENTRY._serialized_options = b'8\001' + _NULLVALUE._serialized_start=474 + _NULLVALUE._serialized_end=501 + _STRUCT._serialized_start=50 + _STRUCT._serialized_end=182 + _STRUCT_FIELDSENTRY._serialized_start=113 + _STRUCT_FIELDSENTRY._serialized_end=182 + _VALUE._serialized_start=185 + _VALUE._serialized_end=419 + _LISTVALUE._serialized_start=421 + _LISTVALUE._serialized_end=472 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py b/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py new file mode 100644 index 0000000000..fdcf8cf06c --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase is the MessageFactory for messages generated at compile time, +and makes it easy to create new instances of a registered type, given only the +type's protocol buffer symbol name. + +Example usage:: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. 
+  db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
+  db.RegisterMessage(my_proto_pb2.MyMessage)
+  db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
+
+  # The database can be used as a MessageFactory, to generate types based on
+  # their name:
+  types = db.GetMessages(['my_proto.proto'])
+  my_message_instance = types['MyMessage']()
+
+  # The database's underlying descriptor pool can be queried, so it's not
+  # necessary to know a type's filename to be able to generate it:
+  filename = db.pool.FindFileContainingSymbol('MyMessage')
+  my_message_instance = db.GetMessages([filename])['MyMessage']()
+
+  # This functionality is also provided directly via a convenience method:
+  my_message_instance = db.GetSymbol('MyMessage')()
+"""
+
+
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message_factory
+
+
+class SymbolDatabase(message_factory.MessageFactory):
+  """A database of Python generated symbols."""
+
+  def RegisterMessage(self, message):
+    """Registers the given message type in the local database.
+
+    Calls to GetSymbol() and GetMessages() will return messages registered
+    here.
+
+    Args:
+      message: A :class:`google.protobuf.message.Message` subclass (or
+        instance); its descriptor will be registered.
+
+    Returns:
+      The provided message.
+    """
+
+    desc = message.DESCRIPTOR
+    self._classes[desc] = message
+    self.RegisterMessageDescriptor(desc)
+    return message
+
+  def RegisterMessageDescriptor(self, message_descriptor):
+    """Registers the given message descriptor in the local database.
+
+    Args:
+      message_descriptor (Descriptor): the message descriptor to add.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._AddDescriptor(message_descriptor)
+
+  def RegisterEnumDescriptor(self, enum_descriptor):
+    """Registers the given enum descriptor in the local database.
+
+    Args:
+      enum_descriptor (EnumDescriptor): The enum descriptor to register.
+
+    Returns:
+      EnumDescriptor: The provided descriptor.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._AddEnumDescriptor(enum_descriptor)
+    return enum_descriptor
+
+  def RegisterServiceDescriptor(self, service_descriptor):
+    """Registers the given service descriptor in the local database.
+
+    Args:
+      service_descriptor (ServiceDescriptor): the service descriptor to
+        register.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._AddServiceDescriptor(service_descriptor)
+
+  def RegisterFileDescriptor(self, file_descriptor):
+    """Registers the given file descriptor in the local database.
+
+    Args:
+      file_descriptor (FileDescriptor): The file descriptor to register.
+    """
+    if api_implementation.Type() == 'python':
+      # pylint: disable=protected-access
+      self.pool._InternalAddFileDescriptor(file_descriptor)
+
+  def GetSymbol(self, symbol):
+    """Tries to find a symbol in the local database.
+
+    Currently, this method only returns message.Message instances; however,
+    it may be extended in the future to support other symbol types.
+
+    Args:
+      symbol (str): a protocol buffer symbol.
+
+    Returns:
+      A Python class corresponding to the symbol.
+
+    Raises:
+      KeyError: if the symbol could not be found.
+    """
+
+    return self._classes[self.pool.FindMessageTypeByName(symbol)]
+
+  def GetMessages(self, files):
+    # TODO(amauryfa): Fix the differences with MessageFactory.
+    """Gets all registered messages from a specified file.
+ + Only messages already created and registered will be returned; (this is the + case for imported _pb2 modules) + But unlike MessageFactory, this version also returns already defined nested + messages, but does not register any message extensions. + + Args: + files (list[str]): The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. + + Raises: + KeyError: if a file could not be found. + """ + + def _GetAllMessages(desc): + """Walk a message Descriptor and recursively yields all message names.""" + yield desc + for msg_desc in desc.nested_types: + for nested_desc in _GetAllMessages(msg_desc): + yield nested_desc + + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for msg_desc in file_desc.message_types_by_name.values(): + for desc in _GetAllMessages(msg_desc): + try: + result[desc.full_name] = self._classes[desc] + except KeyError: + # This descriptor has no registered class, skip it. + pass + return result + + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py b/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py new file mode 100644 index 0000000000..759cf11f62 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py @@ -0,0 +1,110 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
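Rounding out the SymbolDatabase docstring above, a concrete GetMessages() call against a file this change actually vendors (struct.proto); the explicit RegisterMessage calls make the example independent of whether the generated module registered itself:

  from google.protobuf import symbol_database
  from google.protobuf import struct_pb2

  db = symbol_database.Default()
  db.RegisterMessage(struct_pb2.Struct)
  db.RegisterMessage(struct_pb2.Value)
  db.RegisterMessage(struct_pb2.ListValue)

  # Keys are full proto names; nested message types are included as well.
  messages = db.GetMessages(['google/protobuf/struct.proto'])
  value = messages['google.protobuf.Value'](string_value='hello')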
+
+"""Encoding related utilities."""
+import re
+
+_cescape_chr_to_symbol_map = {}
+_cescape_chr_to_symbol_map[9] = r'\t'  # optional escape
+_cescape_chr_to_symbol_map[10] = r'\n'  # optional escape
+_cescape_chr_to_symbol_map[13] = r'\r'  # optional escape
+_cescape_chr_to_symbol_map[34] = r'\"'  # necessary escape
+_cescape_chr_to_symbol_map[39] = r"\'"  # optional escape
+_cescape_chr_to_symbol_map[92] = r'\\'  # necessary escape
+
+# Lookup table for unicode
+_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
+for byte, string in _cescape_chr_to_symbol_map.items():
+  _cescape_unicode_to_str[byte] = string
+
+# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
+_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
+                        [chr(i) for i in range(32, 127)] +
+                        [r'\%03o' % i for i in range(127, 256)])
+for byte, string in _cescape_chr_to_symbol_map.items():
+  _cescape_byte_to_str[byte] = string
+del byte, string
+
+
+def CEscape(text, as_utf8):
+  # type: (...) -> str
+  """Escape a bytes string for use in a text protocol buffer.
+
+  Args:
+    text: A byte string to be escaped.
+    as_utf8: Specifies if result may contain non-ASCII characters.
+        In Python 3 this allows unescaped non-ASCII Unicode characters.
+        In Python 2 the return value will be valid UTF-8 rather than only
+        ASCII.
+  Returns:
+    Escaped string (str).
+  """
+  # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
+  # satisfy our needs; they encode unprintable characters using two-digit hex
+  # escapes whereas our C++ unescaping function allows hex escapes to be any
+  # length. So, "\0011".encode('string_escape') ends up being "\\x011", which
+  # will be decoded in C++ as a single-character string with char code 0x11.
+  text_is_unicode = isinstance(text, str)
+  if as_utf8 and text_is_unicode:
+    # We're already unicode, no processing beyond control char escapes.
+    return text.translate(_cescape_chr_to_symbol_map)
+  ord_ = ord if text_is_unicode else lambda x: x  # bytes iterate as ints.
+  if as_utf8:
+    return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
+  return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
+
+
+_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
+
+
+def CUnescape(text):
+  # type: (str) -> bytes
+  """Unescape a text string with C-style escape sequences to UTF-8 bytes.
+
+  Args:
+    text: The data to parse in a str.
+  Returns:
+    A byte string.
+  """
+
+  def ReplaceHex(m):
+    # Only replace the match if the number of leading backslashes is odd,
+    # i.e. the slash itself is not escaped.
+    if len(m.group(1)) & 1:
+      return m.group(1) + 'x0' + m.group(2)
+    return m.group(0)
+
+  # This is required because the 'string_escape' encoding doesn't
+  # allow single-digit hex escapes (like '\xf').
+  result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
+
+  return (result.encode('utf-8')  # Make it bytes to allow decode.
+          .decode('unicode_escape')
+          # Make it bytes again to return the proper type.
+          .encode('raw_unicode_escape'))
diff --git a/openpype/hosts/nuke/vendor/google/protobuf/text_format.py b/openpype/hosts/nuke/vendor/google/protobuf/text_format.py
new file mode 100644
index 0000000000..412385c26f
--- /dev/null
+++ b/openpype/hosts/nuke/vendor/google/protobuf/text_format.py
@@ -0,0 +1,1795 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
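A quick round trip through the two helpers above (raw bytes in, escaped str out, original bytes back), shown as a minimal sketch:

  from google.protobuf import text_encoding

  raw = b'\x00\x7f\xff "quoted" \n'
  escaped = text_encoding.CEscape(raw, as_utf8=False)
  # CEscape escapes control bytes, quotes, backslashes and non-ASCII bytes;
  # CUnescape reverses the transformation back into the original bytes.
  assert text_encoding.CUnescape(escaped) == raw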
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. + +Simple usage example:: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +# TODO(b/129989314) Import thread contention leads to test failures. 
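Extending the module docstring's usage example, a self-contained round trip through MessageToString() and Parse(), using struct_pb2 (vendored elsewhere in this change):

  from google.protobuf import text_format
  from google.protobuf import struct_pb2

  msg = struct_pb2.Value(number_value=2.5)
  text_proto = text_format.MessageToString(msg, as_one_line=True)
  parsed = text_format.Parse(text_proto, struct_pb2.Value())
  assert parsed.number_value == 2.5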
+import encodings.raw_unicode_escape # pylint: disable=unused-import +import encodings.unicode_escape # pylint: disable=unused-import +import io +import math +import re + +from google.protobuf.internal import decoder +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +# pylint: disable=g-import-not-at-top +__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge', 'MessageToBytes'] + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) +_QUOTES = frozenset(("'", '"')) +_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing or tokenizing error.""" + + def __init__(self, message=None, line=None, column=None): + if message is not None and line is not None: + loc = str(line) + if column is not None: + loc += ':{0}'.format(column) + message = '{0} : {1}'.format(loc, message) + if message is not None: + super(ParseError, self).__init__(message) + else: + super(ParseError, self).__init__() + self._line = line + self._column = column + + def GetLine(self): + return self._line + + def GetColumn(self): + return self._column + + +class TextWriter(object): + + def __init__(self, as_utf8): + self._writer = io.StringIO() + + def write(self, val): + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString( + message, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + indent=0, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + # type: (...) -> str + """Convert protobuf message to text format. + + Double values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using double_format='.15g'. To ensure that converting to text and back to a + proto will result in an identical value, double_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, fields of a proto message will be printed using + the order defined in source code instead of the field number, extensions + will be printed at the end of the message and their relative order is + determined by the extension number. By default, use the field number + order. + float_format (str): If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest float + that has same value in wire will be printed. Also affect double field + if double_format is not set but float_format is set. 
+ double_format (str): If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, use ``str()`` + use_field_number: If True, print field numbers instead of names. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + indent (int): The initial indent level, in terms of spaces, for pretty + print. + message_formatter (function(message, indent, as_one_line) -> unicode|None): + Custom formatter for selected sub-messages (usually based on message + type). Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if the + field is a proto message. + + Returns: + str: A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + printer = _Printer( + out, + indent, + as_utf8, + as_one_line, + use_short_repeated_primitives, + pointy_brackets, + use_index_order, + float_format, + double_format, + use_field_number, + descriptor_pool, + message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def MessageToBytes(message, **kwargs): + # type: (...) -> bytes + """Convert protobuf message to encoded text format. See MessageToString.""" + text = MessageToString(message, **kwargs) + if isinstance(text, bytes): + return text + codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' + return text.encode(codec) + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + printer = _Printer( + out=out, indent=indent, as_utf8=as_utf8, + as_one_line=as_one_line, + use_short_repeated_primitives=use_short_repeated_primitives, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format, + double_format=double_format, + use_field_number=use_field_number, + descriptor_pool=descriptor_pool, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + + +def PrintField(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field name/value pair.""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintField(field, value) + + +def PrintFieldValue(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, 
+ float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field value (not including name).""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintFieldValue(field, value) + + +def _BuildMessageFromTypeName(type_name, descriptor_pool): + """Returns a protobuf message instance. + + Args: + type_name: Fully-qualified protobuf message type name string. + descriptor_pool: DescriptorPool instance. + + Returns: + A Message instance of type matching type_name, or None if the a Descriptor + wasn't found matching type_name. + """ + # pylint: disable=g-import-not-at-top + if descriptor_pool is None: + from google.protobuf import descriptor_pool as pool_mod + descriptor_pool = pool_mod.Default() + from google.protobuf import symbol_database + database = symbol_database.Default() + try: + message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) + except KeyError: + return None + message_type = database.GetPrototype(message_descriptor) + return message_type() + + +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 + + +class _Printer(object): + """Text format printer for protocol message.""" + + def __init__( + self, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Initialize the Printer. + + Double values can be formatted compactly with 15 digits of precision + (which is the most that IEEE 754 "double" can guarantee) using + double_format='.15g'. To ensure that converting to text and back to a proto + will result in an identical value, double_format='.17g' should be used. + + Args: + out: To record the text format result. + indent: The initial indent level for pretty print. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest + float that has same value in wire will be printed. Also affect double + field if double_format is not set but float_format is set. + double_format: If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, str() is used. + use_field_number: If True, print field numbers instead of names. + descriptor_pool: A DescriptorPool used to resolve Any types. 
+ message_formatter: A function(message, indent, as_one_line): unicode|None + to custom format selected sub-messages (usually based on message type). + Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if + the field is a proto message. + """ + self.out = out + self.indent = indent + self.as_utf8 = as_utf8 + self.as_one_line = as_one_line + self.use_short_repeated_primitives = use_short_repeated_primitives + self.pointy_brackets = pointy_brackets + self.use_index_order = use_index_order + self.float_format = float_format + if double_format is not None: + self.double_format = double_format + else: + self.double_format = float_format + self.use_field_number = use_field_number + self.descriptor_pool = descriptor_pool + self.message_formatter = message_formatter + self.print_unknown_fields = print_unknown_fields + self.force_colon = force_colon + + def _TryPrintAsAnyMessage(self, message): + """Serializes if message is a google.protobuf.Any field.""" + if '/' not in message.type_url: + return False + packed_message = _BuildMessageFromTypeName(message.TypeName(), + self.descriptor_pool) + if packed_message: + packed_message.MergeFromString(message.value) + colon = ':' if self.force_colon else '' + self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) + self._PrintMessageFieldValue(packed_message) + self.out.write(' ' if self.as_one_line else '\n') + return True + else: + return False + + def _TryCustomFormatMessage(self, message): + formatted = self.message_formatter(message, self.indent, self.as_one_line) + if formatted is None: + return False + + out = self.out + out.write(' ' * self.indent) + out.write(formatted) + out.write(' ' if self.as_one_line else '\n') + return True + + def PrintMessage(self, message): + """Convert protobuf message to text format. + + Args: + message: The protocol buffers message. + """ + if self.message_formatter and self._TryCustomFormatMessage(message): + return + if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and + self._TryPrintAsAnyMessage(message)): + return + fields = message.ListFields() + if self.use_index_order: + fields.sort( + key=lambda x: x[0].number if x[0].is_extension else x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entries because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key]) + self.PrintField(field, entry_submsg) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if (self.use_short_repeated_primitives + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): + self._PrintShortRepeatedPrimitivesValue(field, value) + else: + for element in value: + self.PrintField(field, element) + else: + self.PrintField(field, value) + + if self.print_unknown_fields: + self._PrintUnknownFields(message.UnknownFields()) + + def _PrintUnknownFields(self, unknown_fields): + """Print unknown fields.""" + out = self.out + for field in unknown_fields: + out.write(' ' * self.indent) + out.write(str(field.field_number)) + if field.wire_type == WIRETYPE_START_GROUP: + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(field.data) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: + try: + # If this field is parseable as a Message, it is probably + # an embedded message. + # pylint: disable=protected-access + (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( + memoryview(field.data), 0, len(field.data)) + except Exception: # pylint: disable=broad-except + pos = 0 + + if pos == len(field.data): + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(embedded_unknown_message) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + else: + # A string or bytes field. self.as_utf8 may not work. + out.write(': \"') + out.write(text_encoding.CEscape(field.data, False)) + out.write('\" ' if self.as_one_line else '\"\n') + else: + # varint, fixed32, fixed64 + out.write(': ') + out.write(str(field.data)) + out.write(' ' if self.as_one_line else '\n') + + def _PrintFieldName(self, field): + """Print field name.""" + out = self.out + out.write(' ' * self.indent) + if self.use_field_number: + out.write(str(field.number)) + else: + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if (self.force_colon or + field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): + # The colon is optional in this case, but our cross-language golden files + # don't include it. Here, the colon is only included if force_colon is + # set to True + out.write(':') + + def PrintField(self, field, value): + """Print a single field name/value pair.""" + self._PrintFieldName(field) + self.out.write(' ') + self.PrintFieldValue(field, value) + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintShortRepeatedPrimitivesValue(self, field, value): + """"Prints short repeated primitives value.""" + # Note: this is called only when value has at least one element. 
+ self._PrintFieldName(field) + self.out.write(' [') + for i in range(len(value) - 1): + self.PrintFieldValue(field, value[i]) + self.out.write(', ') + self.PrintFieldValue(field, value[-1]) + self.out.write(']') + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintMessageFieldValue(self, value): + if self.pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if self.as_one_line: + self.out.write('%s ' % openb) + self.PrintMessage(value) + self.out.write(closeb) + else: + self.out.write('%s\n' % openb) + self.indent += 2 + self.PrintMessage(value) + self.indent -= 2 + self.out.write(' ' * self.indent + closeb) + + def PrintFieldValue(self, field, value): + """Print a single field value (not including name). + + For repeated fields, the value should be a single element. + + Args: + field: The descriptor of the field to be printed. + value: The value of the field. + """ + out = self.out + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self._PrintMessageFieldValue(value) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, str) and not self.as_utf8: + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We always need to escape all binary data in TYPE_BYTES fields. + out_as_utf8 = False + else: + out_as_utf8 = self.as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + if self.float_format is not None: + out.write('{1:{0}}'.format(self.float_format, value)) + else: + if math.isnan(value): + out.write(str(value)) + else: + out.write(str(type_checkers.ToShortestFloat(value))) + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and + self.double_format is not None): + out.write('{1:{0}}'.format(self.double_format, value)) + else: + out.write(str(value)) + + +def Parse(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + NOTE: for historical reasons this function does not clear the input + message. This is different from what the binary msg.ParseFrom(...) does. + If text contains a field already set in message, the value is appended if the + field is repeated. Otherwise, an error is raised. + + Example:: + + a = MyProto() + a.repeated_field.append('test') + b = MyProto() + + # Repeated fields are combined + text_format.Parse(repr(a), b) + text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] + + # Non-repeated fields cannot be overwritten + a.singular_field = 1 + b.singular_field = 2 + text_format.Parse(repr(a), b) # ParseError + + # Binary version: + b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" + + Caller is responsible for clearing the message as needed. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
+ allow_unknown_field: if True, skip over unknown fields and keep
+ parsing. Avoid using this option if possible. It may hide some
+ errors (e.g. a spelling error in a field name)
+
+ Returns:
+ Message: The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'),
+ message,
+ allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+
+
+def Merge(text,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ Like Parse(), but allows repeated values for a non-repeated field, and uses
+ the last one. This means any non-repeated, top-level fields specified in text
+ replace those in the message.
+
+ Args:
+ text (str): Message text representation.
+ message (Message): A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
+ allow_unknown_field: if True, skip over unknown fields and keep
+ parsing. Avoid using this option if possible. It may hide some
+ errors (e.g. a spelling error in a field name)
+
+ Returns:
+ Message: The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ return MergeLines(
+ text.split(b'\n' if isinstance(text, bytes) else u'\n'),
+ message,
+ allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+
+
+def ParseLines(lines,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ See Parse() for caveats.
+
+ Args:
+ lines: An iterable of lines of a message's text representation.
+ message: A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
+ allow_unknown_field: if True, skip over unknown fields and keep
+ parsing. Avoid using this option if possible. It may hide some
+ errors (e.g. a spelling error in a field name)
+
+ Returns:
+ The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ parser = _Parser(allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+ return parser.ParseLines(lines, message)
+
+
+def MergeLines(lines,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ See Merge() for more details.
+
+ Args:
+ lines: An iterable of lines of a message's text representation.
+ message: A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
+ allow_unknown_field: if True, skip over unknown fields and keep
+ parsing. Avoid using this option if possible. It may hide some
+ errors (e.g. a spelling error in a field name)
+
+ Returns:
+ The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ parser = _Parser(allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+ return parser.MergeLines(lines, message)
+
+
+class _Parser(object):
+ """Text format parser for protocol message."""
+
+ def __init__(self,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ self.allow_unknown_extension = allow_unknown_extension
+ self.allow_field_number = allow_field_number
+ self.descriptor_pool = descriptor_pool
+ self.allow_unknown_field = allow_unknown_field
+
+ def ParseLines(self, lines, message):
+ """Parses a text representation of a protocol message into a message."""
+ self._allow_multiple_scalars = False
+ self._ParseOrMerge(lines, message)
+ return message
+
+ def MergeLines(self, lines, message):
+ """Merges a text representation of a protocol message into a message."""
+ self._allow_multiple_scalars = True
+ self._ParseOrMerge(lines, message)
+ return message
+
+ def _ParseOrMerge(self, lines, message):
+ """Converts a text representation of a protocol message into a message.
+
+ Args:
+ lines: Lines of a message's text representation.
+ message: A protocol buffer message to merge into.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ # Tokenize expects native str lines.
+ str_lines = (
+ line if isinstance(line, str) else line.decode('utf-8')
+ for line in lines)
+ tokenizer = Tokenizer(str_lines)
+ while not tokenizer.AtEnd():
+ self._MergeField(tokenizer, message)
+
+ def _MergeField(self, tokenizer, message):
+ """Merges a single protocol message field into a message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ message: A protocol message to record the data.
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ """
+ message_descriptor = message.DESCRIPTOR
+ if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and
+ tokenizer.TryConsume('[')):
+ type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
+ tokenizer.Consume(']')
+ tokenizer.TryConsume(':')
+ if tokenizer.TryConsume('<'):
+ expanded_any_end_token = '>'
+ else:
+ tokenizer.Consume('{')
+ expanded_any_end_token = '}'
+ expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
+ self.descriptor_pool)
+ if not expanded_any_sub_message:
+ raise ParseError('Type %s not found in descriptor pool' %
+ packed_type_name)
+ while not tokenizer.TryConsume(expanded_any_end_token):
+ if tokenizer.AtEnd():
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
+ (expanded_any_end_token,))
+ self._MergeField(tokenizer, expanded_any_sub_message)
+ deterministic = False
+
+ message.Pack(expanded_any_sub_message,
+ type_url_prefix=type_url_prefix,
+ deterministic=deterministic)
+ return
+
+ if tokenizer.TryConsume('['):
+ name = [tokenizer.ConsumeIdentifier()]
+ while tokenizer.TryConsume('.'):
+ name.append(tokenizer.ConsumeIdentifier())
+ name = '.'.join(name)
+
+ if not message_descriptor.is_extendable:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" does not have extensions.' %
+ message_descriptor.full_name)
+ # pylint: disable=protected-access
+ field = message.Extensions._FindExtensionByName(name)
+ # pylint: enable=protected-access
+
+
+ if not field:
+ if self.allow_unknown_extension:
+ field = None
+ else:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Extension "%s" not registered. '
+ 'Did you import the _pb2 module which defines it? '
+ 'If you are trying to place the extension in the MessageSet '
+ 'field of another message that is in an Any or MessageSet field, '
+ 'that message\'s _pb2 module must be imported as well' % name)
+ elif message_descriptor != field.containing_type:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Extension "%s" does not extend message type "%s".' %
+ (name, message_descriptor.full_name))
+
+ tokenizer.Consume(']')
+
+ else:
+ name = tokenizer.ConsumeIdentifierOrNumber()
+ if self.allow_field_number and name.isdigit():
+ number = ParseInteger(name, True, True)
+ field = message_descriptor.fields_by_number.get(number, None)
+ if not field and message_descriptor.is_extendable:
+ field = message.Extensions._FindExtensionByNumber(number)
+ else:
+ field = message_descriptor.fields_by_name.get(name, None)
+
+ # Group names are expected to be capitalized as they appear in the
+ # .proto file, which actually matches their type names, not their field
+ # names.
+ if not field:
+ field = message_descriptor.fields_by_name.get(name.lower(), None)
+ if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
+ field = None
+
+ if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
+ field.message_type.name != name):
+ field = None
+
+ if not field and not self.allow_unknown_field:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" has no field named "%s".' %
+ (message_descriptor.full_name, name))
+
+ if field:
+ if not self._allow_multiple_scalars and field.containing_oneof:
+ # Check if there's a different field set in this oneof.
+ # Note that we ignore the case if the same field was set before, and we
+ # apply _allow_multiple_scalars to non-scalar fields as well.
+ which_oneof = message.WhichOneof(field.containing_oneof.name)
+ if which_oneof is not None and which_oneof != field.name:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Field "%s" is specified along with field "%s", another member '
+ 'of oneof "%s" for message type "%s".' %
+ (field.name, which_oneof, field.containing_oneof.name,
+ message_descriptor.full_name))
+
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ tokenizer.TryConsume(':')
+ merger = self._MergeMessageField
+ else:
+ tokenizer.Consume(':')
+ merger = self._MergeScalarField
+
+ if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
+ tokenizer.TryConsume('[')):
+ # Short repeated format, e.g. "foo: [1, 2, 3]"
"foo: [1, 2, 3]" + if not tokenizer.TryConsume(']'): + while True: + merger(tokenizer, message, field) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + + else: + merger(tokenizer, message, field) + + else: # Proto field is unknown. + assert (self.allow_unknown_extension or self.allow_unknown_field) + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + + def _ConsumeAnyTypeUrl(self, tokenizer): + """Consumes a google.protobuf.Any type URL and returns the type name.""" + # Consume "type.googleapis.com/". + prefix = [tokenizer.ConsumeIdentifier()] + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('/') + # Consume the fully-qualified type name. + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + return '.'.join(prefix), '.'.join(name) + + def _MergeMessageField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: The message of which field is a member. + field: The descriptor of the field to be merged. + + Raises: + ParseError: In case of text parsing problems. + """ + is_map_entry = _IsMapEntry(field) + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = getattr(message, field.name).GetEntryClass()() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + sub_message = message.Extensions[field] + else: + # Also apply _allow_multiple_scalars to message field. + # TODO(jieluo): Change to _allow_singular_overwrites. + if (not self._allow_multiple_scalars and + message.HasField(field.name)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) + self._MergeField(tokenizer, sub_message) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.CopyFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + + @staticmethod + def _IsProto3Syntax(message): + message_descriptor = message.DESCRIPTOR + return (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3') + + def _MergeScalarField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. 
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ RuntimeError: On runtime errors.
+ """
+ _ = self.allow_unknown_extension
+ value = None
+
+ if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
+ descriptor.FieldDescriptor.TYPE_SINT32,
+ descriptor.FieldDescriptor.TYPE_SFIXED32):
+ value = _ConsumeInt32(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
+ descriptor.FieldDescriptor.TYPE_SINT64,
+ descriptor.FieldDescriptor.TYPE_SFIXED64):
+ value = _ConsumeInt64(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
+ descriptor.FieldDescriptor.TYPE_FIXED32):
+ value = _ConsumeUint32(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
+ descriptor.FieldDescriptor.TYPE_FIXED64):
+ value = _ConsumeUint64(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
+ descriptor.FieldDescriptor.TYPE_DOUBLE):
+ value = tokenizer.ConsumeFloat()
+ elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ value = tokenizer.ConsumeBool()
+ elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
+ value = tokenizer.ConsumeString()
+ elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ value = tokenizer.ConsumeByteString()
+ elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ value = tokenizer.ConsumeEnum(field)
+ else:
+ raise RuntimeError('Unknown field type %d' % field.type)
+
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if field.is_extension:
+ message.Extensions[field].append(value)
+ else:
+ getattr(message, field.name).append(value)
+ else:
+ if field.is_extension:
+ if (not self._allow_multiple_scalars and
+ not self._IsProto3Syntax(message) and
+ message.HasExtension(field)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" extensions.' %
+ (message.DESCRIPTOR.full_name, field.full_name))
+ else:
+ message.Extensions[field] = value
+ else:
+ duplicate_error = False
+ if not self._allow_multiple_scalars:
+ if self._IsProto3Syntax(message):
+ # Proto3 doesn't represent presence, so we make a best-effort check
+ # for multiple scalars by comparing against default values.
+ duplicate_error = bool(getattr(message, field.name))
+ else:
+ duplicate_error = message.HasField(field.name)
+
+ if duplicate_error:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" fields.' %
+ (message.DESCRIPTOR.full_name, field.name))
+ else:
+ setattr(message, field.name, value)
+
+
+def _SkipFieldContents(tokenizer):
+ """Skips over contents (value or message) of a field.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ """
+ # Try to guess the type of this field.
+ # If this field is not a message, there should be a ":" between the
+ # field name and the field value and also the field value should not
+ # start with "{" or "<" which indicates the beginning of a message body.
+ # If there is no ":" or there is a "{" or "<" after ":", this field has
+ # to be a message or the input is ill-formed.
+ if tokenizer.TryConsume(':') and not tokenizer.LookingAt(
+ '{') and not tokenizer.LookingAt('<'):
+ _SkipFieldValue(tokenizer)
+ else:
+ _SkipFieldMessage(tokenizer)
+
+
+def _SkipField(tokenizer):
+ """Skips over a complete field (name and value/message).
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ """
+ if tokenizer.TryConsume('['):
+ # Consume extension name.
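+ # e.g. the bracketed form "[my.pkg.ext_field]" (hypothetical name).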
+ tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifierOrNumber() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. + """ + # String/bytes tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeByteString(): + while tokenizer.TryConsumeByteString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +class Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile(r'\s+') + _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) + _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + # Avoid backtracking! https://stackoverflow.com/a/844267 + r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) + for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'[^\d\W]\w*') + _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') + + def __init__(self, lines, skip_comments=True): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._skip_comments = skip_comments + self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT + or self._WHITESPACE) + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._whitespace_pattern.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. 
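+
+ Example (illustrative)::
+
+ if tokenizer.TryConsume(':'):
+ pass # an optional colon was present and has been consumed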
+ """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. + + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self.ParseError('Expected "%s".' % token) + + def ConsumeComment(self): + result = self.token + if not self._COMMENT.match(result): + raise self.ParseError('Expected comment.') + self.NextToken() + return result + + def ConsumeCommentOrTrailingComment(self): + """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" + + # Tokenizer initializes _previous_line and _previous_column to 0. As the + # tokenizer starts, it looks like there is a previous token on the line. + just_started = self._line == 0 and self._column == 0 + + before_parsing = self._previous_line + comment = self.ConsumeComment() + + # A trailing comment is a comment on the same line than the previous token. + trailing = (self._previous_line == before_parsing + and not just_started) + + return trailing, comment + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self.ParseError('Expected identifier.') + self.NextToken() + return result + + def TryConsumeIdentifierOrNumber(self): + try: + self.ConsumeIdentifierOrNumber() + return True + except ParseError: + return False + + def ConsumeIdentifierOrNumber(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER_OR_NUMBER.match(result): + raise self.ParseError('Expected identifier or number, got %s.' % result) + self.NextToken() + return result + + def TryConsumeInteger(self): + try: + self.ConsumeInteger() + return True + except ParseError: + return False + + def ConsumeInteger(self): + """Consumes an integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer couldn't be consumed. + """ + try: + result = _ParseAbstractInteger(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeByteString(self): + try: + self.ConsumeByteString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. 
+ """ + the_bytes = self.ConsumeByteString() + try: + return str(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. + """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self.ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self.ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. + """ + return ParseError(message, self._previous_line + 1, + self._previous_column + 1) + + def ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('\'' + self._current_line + '\': ' + message, + self._line + 1, self._column + 1) + + def _StringParseError(self, e): + return self.ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if not match and not self._skip_comments: + match = self._COMMENT.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + +# Aliased so it can still be accessed by current visibility violators. +# TODO(dbarnett): Migrate violators to textformat_tokenizer. +_Tokenizer = Tokenizer # pylint: disable=invalid-name + + +def _ConsumeInt32(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) + + +def _ConsumeUint32(tokenizer): + """Consumes an unsigned 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. 
+ """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) + + +def _TryConsumeInt64(tokenizer): + try: + _ConsumeInt64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeInt64(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) + + +def _TryConsumeUint64(tokenizer): + try: + _ConsumeUint64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeUint64(tokenizer): + """Consumes an unsigned 64bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) + + +def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): + """Consumes an integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer with given characteristics couldn't be consumed. + """ + try: + result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) + except ValueError as e: + raise tokenizer.ParseError(str(e)) + tokenizer.NextToken() + return result + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + result = _ParseAbstractInteger(text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def _ParseAbstractInteger(text): + """Parses an integer without checking size/signedness. + + Args: + text: The text to parse. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + orig_text = text + c_octal_match = re.match(r'(-?)0(\d+)$', text) + if c_octal_match: + # Python 3 no longer supports 0755 octal syntax without the 'o', so + # we always use the '0o' prefix for multi-digit numbers starting with 0. + text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) + try: + return int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % orig_text) + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. + """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. 
+ if _FLOAT_INFINITY.match(text):
+ if text[0] == '-':
+ return float('-inf')
+ else:
+ return float('inf')
+ elif _FLOAT_NAN.match(text):
+ return float('nan')
+ else:
+ # assume '1.0f' format
+ try:
+ return float(text.rstrip('f'))
+ except ValueError:
+ raise ValueError('Couldn\'t parse float: %s' % text)
+
+
+def ParseBool(text):
+ """Parse a boolean value.
+
+ Args:
+ text: Text to parse.
+
+ Returns:
+ The boolean value parsed.
+
+ Raises:
+ ValueError: If text is not a valid boolean.
+ """
+ if text in ('true', 't', '1', 'True'):
+ return True
+ elif text in ('false', 'f', '0', 'False'):
+ return False
+ else:
+ raise ValueError('Expected "true" or "false".')
+
+
+def ParseEnum(field, value):
+ """Parse an enum value.
+
+ The value can be specified by a number (the enum value), or by
+ a string literal (the enum name).
+
+ Args:
+ field: Enum field descriptor.
+ value: String value.
+
+ Returns:
+ Enum value number.
+
+ Raises:
+ ValueError: If the enum value could not be parsed.
+ """
+ enum_descriptor = field.enum_type
+ try:
+ number = int(value, 0)
+ except ValueError:
+ # Identifier.
+ enum_value = enum_descriptor.values_by_name.get(value, None)
+ if enum_value is None:
+ raise ValueError('Enum type "%s" has no value named %s.' %
+ (enum_descriptor.full_name, value))
+ else:
+ # Numeric value.
+ if hasattr(field.file, 'syntax'):
+ # Attribute is checked for compatibility.
+ if field.file.syntax == 'proto3':
+ # Proto3 accepts unknown numeric enum values.
+ return number
+ enum_value = enum_descriptor.values_by_number.get(number, None)
+ if enum_value is None:
+ raise ValueError('Enum type "%s" has no value with number %d.' %
+ (enum_descriptor.full_name, number))
+ return enum_value.number
diff --git a/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py new file mode 100644 index 0000000000..558d496941 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/timestamp.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _TIMESTAMP._serialized_start=52 + _TIMESTAMP._serialized_end=95 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py new file mode 100644 index 0000000000..19903fb6b4 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SYNTAX._serialized_start=1413 + _SYNTAX._serialized_end=1459 + _TYPE._serialized_start=113 + _TYPE._serialized_end=328 + _FIELD._serialized_start=331 + _FIELD._serialized_end=1056 + _FIELD_KIND._serialized_start=610 + _FIELD_KIND._serialized_end=938 + _FIELD_CARDINALITY._serialized_start=940 + _FIELD_CARDINALITY._serialized_end=1056 + _ENUM._serialized_start=1059 + _ENUM._serialized_end=1265 + _ENUMVALUE._serialized_start=1267 + _ENUMVALUE._serialized_end=1350 + _OPTION._serialized_start=1352 + _OPTION._serialized_end=1411 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py new file mode 100644 index 0000000000..66a5836c82 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) + + DESCRIPTOR._options = None + _TESTBOOLMAP_BOOLMAPENTRY._options = None + _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' + _ENUMVALUE._serialized_start=1607 + _ENUMVALUE._serialized_end=1657 + _TESTFLAGSANDSTRINGS._serialized_start=62 + _TESTFLAGSANDSTRINGS._serialized_end=199 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 + _TESTBASE64BYTEARRAYS._serialized_start=201 + _TESTBASE64BYTEARRAYS._serialized_end=234 + _TESTJAVASCRIPTJSON._serialized_start=236 + _TESTJAVASCRIPTJSON._serialized_end=307 + _TESTJAVASCRIPTORDERJSON1._serialized_start=309 + _TESTJAVASCRIPTORDERJSON1._serialized_end=390 + _TESTJAVASCRIPTORDERJSON2._serialized_start=393 + _TESTJAVASCRIPTORDERJSON2._serialized_end=530 + _TESTLARGEINT._serialized_start=532 + _TESTLARGEINT._serialized_end=568 + _TESTNUMBERS._serialized_start=571 + _TESTNUMBERS._serialized_end=731 + _TESTNUMBERS_MYTYPE._serialized_start=691 + _TESTNUMBERS_MYTYPE._serialized_end=731 + _TESTCAMELCASE._serialized_start=733 + _TESTCAMELCASE._serialized_end=817 + _TESTBOOLMAP._serialized_start=819 + _TESTBOOLMAP._serialized_end=943 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 + _TESTRECURSION._serialized_start=945 + _TESTRECURSION._serialized_end=1024 + _TESTSTRINGMAP._serialized_start=1027 + _TESTSTRINGMAP._serialized_end=1161 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 + _TESTSTRINGSERIALIZER._serialized_start=1164 + _TESTSTRINGSERIALIZER._serialized_end=1360 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 + _TESTMESSAGEWITHEXTENSION._serialized_start=1362 + _TESTMESSAGEWITHEXTENSION._serialized_end=1398 + _TESTEXTENSION._serialized_start=1400 + _TESTEXTENSION._serialized_end=1522 + _TESTDEFAULTENUMVALUE._serialized_start=1524 + _TESTDEFAULTENUMVALUE._serialized_end=1605 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 0000000000..5498deafa9 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format_proto3.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' + _TESTMAP_BOOLMAPENTRY._options = None + _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT32MAPENTRY._options = None + _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT64MAPENTRY._options = None + _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT32MAPENTRY._options = None + _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT64MAPENTRY._options = None + _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_STRINGMAPENTRY._options = None + _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_BOOLMAPENTRY._options = None + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT32MAPENTRY._options = None + _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT64MAPENTRY._options = None + _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_UINT32MAPENTRY._options = None + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' + 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_STRINGMAPENTRY._options = None + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_MAPMAPENTRY._options = None + _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTBOOLVALUE_BOOLMAPENTRY._options = None + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' + _ENUMTYPE._serialized_start=4849 + _ENUMTYPE._serialized_end=4877 + _MESSAGETYPE._serialized_start=277 + _MESSAGETYPE._serialized_end=305 + _TESTMESSAGE._serialized_start=308 + _TESTMESSAGE._serialized_end=968 + _TESTONEOF._serialized_start=971 + _TESTONEOF._serialized_end=1239 + _TESTMAP._serialized_start=1242 + _TESTMAP._serialized_end=1851 + _TESTMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTMAP_INT32MAPENTRY._serialized_start=1605 + _TESTMAP_INT32MAPENTRY._serialized_end=1652 + _TESTMAP_INT64MAPENTRY._serialized_start=1654 + _TESTMAP_INT64MAPENTRY._serialized_end=1701 + _TESTMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP._serialized_start=1854 + _TESTNESTEDMAP._serialized_end=2627 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 + _TESTSTRINGMAP._serialized_start=2629 + _TESTSTRINGMAP._serialized_end=2752 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 + _TESTWRAPPER._serialized_start=2755 + _TESTWRAPPER._serialized_end=3761 + _TESTTIMESTAMP._serialized_start=3763 + _TESTTIMESTAMP._serialized_end=3873 + _TESTDURATION._serialized_start=3875 + _TESTDURATION._serialized_end=3982 + _TESTFIELDMASK._serialized_start=3984 + _TESTFIELDMASK._serialized_end=4042 + _TESTSTRUCT._serialized_start=4044 + _TESTSTRUCT._serialized_end=4145 + _TESTANY._serialized_start=4147 + _TESTANY._serialized_end=4239 + _TESTVALUE._serialized_start=4241 + _TESTVALUE._serialized_end=4339 + _TESTLISTVALUE._serialized_start=4341 + _TESTLISTVALUE._serialized_end=4451 + _TESTBOOLVALUE._serialized_start=4454 + _TESTBOOLVALUE._serialized_end=4591 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 + _TESTCUSTOMJSONNAME._serialized_start=4593 + _TESTCUSTOMJSONNAME._serialized_end=4636 + _TESTEXTENSIONS._serialized_start=4638 + _TESTEXTENSIONS._serialized_end=4712 + _TESTENUMVALUE._serialized_start=4715 + _TESTENUMVALUE._serialized_end=4847 +# @@protoc_insertion_point(module_scope) diff --git 
a/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py new file mode 100644 index 0000000000..e49eb4c15d --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/wrappers.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DOUBLEVALUE._serialized_start=51 + _DOUBLEVALUE._serialized_end=79 + _FLOATVALUE._serialized_start=81 + _FLOATVALUE._serialized_end=108 + _INT64VALUE._serialized_start=110 + _INT64VALUE._serialized_end=137 + _UINT64VALUE._serialized_start=139 + _UINT64VALUE._serialized_end=167 + _INT32VALUE._serialized_start=169 + _INT32VALUE._serialized_end=196 + _UINT32VALUE._serialized_start=198 + _UINT32VALUE._serialized_end=226 + _BOOLVALUE._serialized_start=228 + _BOOLVALUE._serialized_end=254 + _STRINGVALUE._serialized_start=256 + _STRINGVALUE._serialized_end=284 + _BYTESVALUE._serialized_start=286 + _BYTESVALUE._serialized_end=313 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/photoshop/__init__.py b/openpype/hosts/photoshop/__init__.py index a91e0a65ff..773f73d624 100644 --- a/openpype/hosts/photoshop/__init__.py +++ b/openpype/hosts/photoshop/__init__.py @@ -1,9 +1,10 @@ -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True", - "WEBSOCKET_URL": "ws://localhost:8099/ws/" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +from .addon import ( + PhotoshopAddon, + PHOTOSHOP_HOST_DIR, +) + + +__all__ = ( + "PhotoshopAddon", + "PHOTOSHOP_HOST_DIR", +) diff --git a/openpype/hosts/photoshop/addon.py b/openpype/hosts/photoshop/addon.py new file mode 100644 index 0000000000..a41d91554b --- /dev/null +++ b/openpype/hosts/photoshop/addon.py @@ -0,0 +1,26 @@ 
+import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class PhotoshopAddon(OpenPypeModule, IHostAddon): + name = "photoshop" + host_name = "photoshop" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True", + "WEBSOCKET_URL": "ws://localhost:8099/ws/" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".psd", ".psb"] diff --git a/openpype/hosts/photoshop/api/README.md b/openpype/hosts/photoshop/api/README.md index 80792a4da0..4a36746cb2 100644 --- a/openpype/hosts/photoshop/api/README.md +++ b/openpype/hosts/photoshop/api/README.md @@ -127,11 +127,11 @@ class CollectInstances(pyblish.api.ContextPlugin): ```python import os -import openpype.api -from avalon import photoshop +from openpype.pipeline import publish +from openpype.hosts.photoshop import api as photoshop -class ExtractImage(openpype.api.Extractor): +class ExtractImage(publish.Extractor): """Produce a flattened image file from instance This plug-in takes into account only the layers in the group. diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 0bbb19523d..1f0203dca6 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -10,7 +10,7 @@ from wsrpc_aiohttp import ( from Qt import QtCore -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.tools.adobe_webserver.app import WebServerTool diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 2f57d64464..221b4314e6 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -5,11 +5,10 @@ import traceback from Qt import QtWidgets -from openpype.api import Logger +from openpype.lib import env_value_to_bool, Logger +from openpype.modules import ModulesManager from openpype.pipeline import install_host from openpype.tools.utils import host_tools -from openpype.lib.remote_publish import headless_publish -from openpype.lib import env_value_to_bool from .launch_logic import ProcessLauncher, stub @@ -35,8 +34,10 @@ def main(*subprocess_args): launcher.start() if env_value_to_bool("HEADLESS_PUBLISH"): + manager = ModulesManager() + webpublisher_addon = manager["webpublisher"] launcher.execute_in_main_thread( - headless_publish, + webpublisher_addon.headless_publish, log, "ClosePS", os.environ.get("IS_TEST") @@ -63,10 +64,15 @@ def maintained_selection(): @contextlib.contextmanager -def maintained_visibility(): - """Maintain visibility during context.""" +def maintained_visibility(layers=None): + """Maintain visibility during context. 
+ + Args: + layers (list): list of PSItem (used for caching) + """ visibility = {} - layers = stub().get_layers() + if not layers: + layers = stub().get_layers() for layer in layers: visibility[layer.id] = layer.visible try: diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index ee150d1808..9f6fc0983c 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -3,8 +3,7 @@ from Qt import QtWidgets import pyblish.api -from openpype.api import Logger -from openpype.lib import register_event_callback +from openpype.lib import register_event_callback, Logger from openpype.pipeline import ( legacy_io, register_loader_plugin_path, @@ -14,14 +13,13 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -import openpype.hosts.photoshop +from openpype.hosts.photoshop import PHOTOSHOP_HOST_DIR from . import lib log = Logger.get_logger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.photoshop.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(PHOTOSHOP_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") diff --git a/openpype/hosts/photoshop/api/workio.py b/openpype/hosts/photoshop/api/workio.py index 951c5dbfff..35b44d6070 100644 --- a/openpype/hosts/photoshop/api/workio.py +++ b/openpype/hosts/photoshop/api/workio.py @@ -1,7 +1,6 @@ """Host API required Work Files tool""" import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from . import lib @@ -14,7 +13,7 @@ def _active_document(): def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["photoshop"] + return [".psd", ".psb"] def has_unsaved_changes(): diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index b49bf1c73f..2c4d0ad5fc 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -229,10 +229,11 @@ class PhotoshopServerStub: return self._get_layers_in_layers(parent_ids) - def get_layers_in_layers_ids(self, layers_ids): + def get_layers_in_layers_ids(self, layers_ids, layers=None): """Return all layers that belong to layers (might be groups).
Args: + layers_ids (list): list of layer ids layers (list): cached list of PSItem (optional) Returns: @@ -240,10 +241,13 @@ class PhotoshopServerStub: """ parent_ids = set(layers_ids) - return self._get_layers_in_layers(parent_ids) + return self._get_layers_in_layers(parent_ids, layers) - def _get_layers_in_layers(self, parent_ids): - all_layers = self.get_layers() + def _get_layers_in_layers(self, parent_ids, layers=None): + if not layers: + layers = self.get_layers() + + all_layers = layers ret = [] for layer in all_layers: @@ -394,14 +398,17 @@ class PhotoshopServerStub: self.hide_all_others_layers_ids(extract_ids) - def hide_all_others_layers_ids(self, extract_ids): + def hide_all_others_layers_ids(self, extract_ids, layers=None): """Hides all layers that are not part of the list or that are not children of this list Args: extract_ids (list): list of integers that should be visible + layers (list): list of PSItem (used for caching) """ - for layer in self.get_layers(): + if not layers: + layers = self.get_layers() + for layer in layers: if layer.visible and layer.id not in extract_ids: self.set_visible(layer.id, False) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 2881ef0ea6..5d50a78914 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -17,11 +17,11 @@ import os import pyblish.api -from openpype.lib.plugin_tools import ( - parse_json, - get_batch_asset_task_info -) from openpype.pipeline import legacy_io +from openpype_modules.webpublisher.lib import ( + get_batch_asset_task_info, + parse_json +) class CollectBatchData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 71bd2cd854..c157c932fd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -9,14 +9,22 @@ from openpype.settings import get_project_settings class CollectColorCodedInstances(pyblish.api.ContextPlugin): - """Creates instances for configured color code of a layer. + """Creates instances for layers marked by configurable color. Used in remote publishing when artists mark publishable layers by color- - coding. + coding. Top level layers (group) must be marked by a specific color to be + published as an instance of the 'image' family. Can add a group for all publishable layers to allow creation of a flattened image. (Cannot contain special background layer as it cannot be grouped!) + Based on the value of `create_flatten_image` from Settings: + - "yes": create flattened 'image' subset of all publishable layers + create + 'image' subset per publishable layer + - "only": create ONLY flattened 'image' subset of all publishable layers + - "no": do not create flattened 'image' subset at all, + only separate subsets per marked layer.
+ + Identifier: + id (str): "pyblish.avalon.instance" """ @@ -32,8 +40,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): # TODO check if could be set globally, probably doesn't make sense when # flattened template cannot subset_template_name = "" - create_flatten_image = False - # probably not possible to configure this globally + create_flatten_image = "no" flatten_subset_template = "" def process(self, context): @@ -62,6 +69,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): publishable_layers = [] created_instances = [] + family_from_settings = None for layer in layers: self.log.debug("Layer:: {}".format(layer)) if layer.parents: @@ -80,6 +88,9 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): self.log.debug("!!! Not found family or template, skip") continue + if not family_from_settings: + family_from_settings = resolved_family + fill_pairs = { "variant": variant, "family": resolved_family, @@ -98,13 +109,16 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): "Subset {} already created, skipping.".format(subset)) continue - instance = self._create_instance(context, layer, resolved_family, - asset_name, subset, task_name) + if self.create_flatten_image != "flatten_only": + instance = self._create_instance(context, layer, + resolved_family, + asset_name, subset, task_name) + created_instances.append(instance) + existing_subset_names.append(subset) publishable_layers.append(layer) - created_instances.append(instance) - if self.create_flatten_image and publishable_layers: + if self.create_flatten_image != "no" and publishable_layers: self.log.debug("create_flatten_image") if not self.flatten_subset_template: self.log.warning("No template for flatten image") @@ -116,7 +130,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): first_layer = publishable_layers[0] # dummy layer first_layer.name = subset - family = created_instances[0].data["family"] # inherit family + family = family_from_settings # inherit family instance = self._create_instance(context, first_layer, family, asset_name, subset, task_name) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_published_version.py b/openpype/hosts/photoshop/plugins/publish/collect_published_version.py new file mode 100644 index 0000000000..2502689e4b --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_published_version.py @@ -0,0 +1,55 @@ +"""Collects published version of workfile and increments it. + +For synchronization of the published image and workfile versions it is +required to store the workfile version (from the workfile file name) in +context.data["version"]. In remote publishing this name is unreliable (the +artist might not follow the naming convention etc.), so the last published +workfile version for the particular workfile subset is used instead. + +This plugin runs only in remote publishing (e.g. Webpublisher).
+ +Requires: + context.data["assetEntity"] + +Provides: + context["version"] - incremented latest published workfile version +""" + +import pyblish.api + +from openpype.client import get_last_version_by_subset_name + + +class CollectPublishedVersion(pyblish.api.ContextPlugin): + """Collects published version of workfile and increments it.""" + + order = pyblish.api.CollectorOrder + 0.190 + label = "Collect published version" + hosts = ["photoshop"] + targets = ["remotepublish"] + + def process(self, context): + workfile_subset_name = None + for instance in context: + if instance.data["family"] == "workfile": + workfile_subset_name = instance.data["subset"] + break + + if not workfile_subset_name: + self.log.warning("No workfile instance found, " + "synchronization of version will not work.") + return + + project_name = context.data["projectName"] + asset_doc = context.data["assetEntity"] + asset_id = asset_doc["_id"] + + version_doc = get_last_version_by_subset_name(project_name, + workfile_subset_name, + asset_id) + version_int = 1 + if version_doc: + version_int += int(version_doc["name"]) + + self.log.debug(f"Setting {version_int} to context.") + context.data["version"] = version_int diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 2ea5503f3f..7e598a8250 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,7 +10,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectReview(pyblish.api.ContextPlugin): @@ -25,15 +25,18 @@ hosts = ["photoshop"] order = pyblish.api.CollectorOrder + 0.1 + publish = True + def process(self, context): family = "review" - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, context.data.get("variant", ''), context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) instance = context.create_instance(subset) @@ -44,5 +47,6 @@ "family": family, "families": [], "representations": [], - "asset": os.environ["AVALON_ASSET"] + "asset": os.environ["AVALON_ASSET"], + "publish": self.publish }) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_version.py b/openpype/hosts/photoshop/plugins/publish/collect_version.py new file mode 100644 index 0000000000..cda71d8643 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_version.py @@ -0,0 +1,29 @@ +import pyblish.api + + +class CollectVersion(pyblish.api.InstancePlugin): + """Collect version for publishable instances. + + Used to synchronize the version from the workfile to all publishable + instances: + - image (manually created or color coded) + - review + - workfile + + Dev comment: + An explicit collector was created to control this from a single place + instead of three different ones. + + The workfile version is set here explicitly, as the version might be + forced to latest + 1 because of Webpublisher. + (This plugin must run after CollectPublishedVersion!)
+ """ + order = pyblish.api.CollectorOrder + 0.200 + label = 'Collect Version' + + hosts = ["photoshop"] + families = ["image", "review", "workfile"] + + def process(self, instance): + workfile_version = instance.context.data["version"] + self.log.debug(f"Applying version {workfile_version}") + instance.data["version"] = workfile_version diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 9cf6d5227e..9a5aad5569 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -24,13 +24,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" # context.data["variant"] might come only from collect_batch_data variant = context.data.get("variant") or self.default_variant - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) file_path = context.data["currentFile"] diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index a133e33409..cdb28c742d 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -1,61 +1,99 @@ import os -import openpype.api +import pyblish.api +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractImage(openpype.api.Extractor): - """Produce a flattened image file from instance +class ExtractImage(pyblish.api.ContextPlugin): + """Extract all layers (groups) marked for publish. - This plug-in takes into account only the layers in the group. + Usually publishable instance is created as a wrapper of layer(s). For each + publishable instance so many images as there is 'formats' is created. + + Logic tries to hide/unhide layers minimum times. + + Called once for all publishable instances. """ + order = publish.Extractor.order - 0.48 label = "Extract Image" hosts = ["photoshop"] + families = ["image", "background"] formats = ["png", "jpg"] - def process(self, instance): - staging_dir = self.staging_dir(instance) - self.log.info("Outputting image to {}".format(staging_dir)) - - # Perform extraction + def process(self, context): stub = photoshop.stub() - files = {} + hidden_layer_ids = set() + + all_layers = stub.get_layers() + for layer in all_layers: + if not layer.visible: + hidden_layer_ids.add(layer.id) + stub.hide_all_others_layers_ids([], layers=all_layers) + with photoshop.maintained_selection(): - self.log.info("Extracting %s" % str(list(instance))) - with photoshop.maintained_visibility(): - ids = set() - layer = instance.data.get("layer") - if layer: - ids.add(layer.id) - add_ids = instance.data.pop("ids", None) - if add_ids: - ids.update(set(add_ids)) - extract_ids = set([ll.id for ll in stub. 
- get_layers_in_layers_ids(ids)]) - stub.hide_all_others_layers_ids(extract_ids) + with photoshop.maintained_visibility(layers=all_layers): + for instance in context: + if instance.data["family"] not in self.families: + continue - file_basename = os.path.splitext( - stub.get_active_document_name() - )[0] - for extension in self.formats: - _filename = "{}.{}".format(file_basename, extension) - files[extension] = _filename + staging_dir = self.staging_dir(instance) + self.log.info("Outputting image to {}".format(staging_dir)) - full_filename = os.path.join(staging_dir, _filename) - stub.saveAs(full_filename, extension, True) - self.log.info(f"Extracted: {extension}") + # Perform extraction + files = {} + ids = set() + layer = instance.data.get("layer") + if layer: + ids.add(layer.id) + add_ids = instance.data.pop("ids", None) + if add_ids: + ids.update(set(add_ids)) + extract_ids = set([ll.id for ll in stub. + get_layers_in_layers_ids(ids, all_layers) + if ll.id not in hidden_layer_ids]) - representations = [] - for extension, filename in files.items(): - representations.append({ - "name": extension, - "ext": extension, - "files": filename, - "stagingDir": staging_dir - }) - instance.data["representations"] = representations - instance.data["stagingDir"] = staging_dir + for extracted_id in extract_ids: + stub.set_visible(extracted_id, True) - self.log.info(f"Extracted {instance} to {staging_dir}") + file_basename = os.path.splitext( + stub.get_active_document_name() + )[0] + for extension in self.formats: + _filename = "{}.{}".format(file_basename, + extension) + files[extension] = _filename + + full_filename = os.path.join(staging_dir, + _filename) + stub.saveAs(full_filename, extension, True) + self.log.info(f"Extracted: {extension}") + + representations = [] + for extension, filename in files.items(): + representations.append({ + "name": extension, + "ext": extension, + "files": filename, + "stagingDir": staging_dir + }) + instance.data["representations"] = representations + instance.data["stagingDir"] = staging_dir + + self.log.info(f"Extracted {instance} to {staging_dir}") + + for extracted_id in extract_ids: + stub.set_visible(extracted_id, False) + + def staging_dir(self, instance): + """Provide a temporary directory in which to store extracted files. + + Upon calling this method the staging directory is stored inside + the instance.data['stagingDir']. + """ + + from openpype.pipeline.publish import get_instance_staging_dir + + return get_instance_staging_dir(instance) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 5d37c86ed8..01022ce0b2 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -2,12 +2,15 @@ import os import shutil from PIL import Image -import openpype.api -import openpype.lib +from openpype.lib import ( + run_subprocess, + get_ffmpeg_tool_path, +) +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractReview(openpype.api.Extractor): +class ExtractReview(publish.Extractor): """ Produce a flattened image or a sequence of image files from all 'image' instances.
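Worth pausing on the new `extract_image.py` above: it turns extraction into a single pass over the whole context, so the expensive `get_layers()` round-trip to Photoshop happens once instead of once per instance. Below is a minimal sketch of that batching pattern using the stub calls from the diff; `instances_layer_ids` and the `/tmp` output path are hypothetical placeholders for illustration, not part of the plugin:

```python
from openpype.hosts.photoshop import api as photoshop


def extract_marked_layers(instances_layer_ids, formats=("png", "jpg")):
    """Show each instance's layers in turn while everything else stays hidden.

    instances_layer_ids is assumed to be a list of layer-id sets, one set
    per publishable instance (hypothetical shape, for illustration only).
    """
    stub = photoshop.stub()

    # One round-trip to Photoshop; the result is reused for every instance.
    all_layers = stub.get_layers()
    hidden_layer_ids = {layer.id for layer in all_layers if not layer.visible}

    # Hide everything up front; 'maintained_visibility' restores the
    # original visibility state when the context exits.
    stub.hide_all_others_layers_ids([], layers=all_layers)
    with photoshop.maintained_selection():
        with photoshop.maintained_visibility(layers=all_layers):
            for ids in instances_layer_ids:
                extract_ids = {
                    layer.id
                    for layer in stub.get_layers_in_layers_ids(ids, all_layers)
                    if layer.id not in hidden_layer_ids
                }
                for layer_id in extract_ids:
                    stub.set_visible(layer_id, True)

                name = stub.get_active_document_name().rsplit(".", 1)[0]
                for ext in formats:
                    stub.saveAs("/tmp/{}.{}".format(name, ext), ext, True)

                # Hide again so the next instance starts from a clean slate.
                for layer_id in extract_ids:
                    stub.set_visible(layer_id, False)
```

Running once per context (hence the switch to `pyblish.api.ContextPlugin`) is what lets the cached `all_layers` list and the precomputed `hidden_layer_ids` be shared by every publishable instance.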
@@ -46,7 +49,7 @@ if self.make_image_sequence and len(layers) > 1: self.log.info("Extract layers to image sequence.") - img_list = self._saves_sequences_layers(staging_dir, layers) + img_list = self._save_sequence_images(staging_dir, layers) instance.data["representations"].append({ "name": "jpg", @@ -61,7 +64,7 @@ processed_img_names = img_list else: self.log.info("Extract layers to flatten image.") - img_list = self._saves_flattened_layers(staging_dir, layers) + img_list = self._save_flatten_image(staging_dir, layers) instance.data["representations"].append({ "name": "jpg", @@ -72,7 +75,7 @@ }) processed_img_names = [img_list] - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") instance.data["stagingDir"] = staging_dir @@ -81,6 +84,67 @@ source_files_pattern = self._check_and_resize(processed_img_names, source_files_pattern, staging_dir) + self._generate_thumbnail(ffmpeg_path, instance, source_files_pattern, + staging_dir) + + no_of_frames = len(processed_img_names) + if no_of_frames > 1: + self._generate_mov(ffmpeg_path, instance, fps, no_of_frames, + source_files_pattern, staging_dir) + + self.log.info(f"Extracted {instance} to {staging_dir}") + + def _generate_mov(self, ffmpeg_path, instance, fps, no_of_frames, + source_files_pattern, staging_dir): + """Generates .mov to upload to Ftrack. + + Args: + ffmpeg_path (str): path to ffmpeg + instance (Pyblish Instance) + fps (str): frames per second + no_of_frames (int): number of frames + source_files_pattern (str): name of source file + staging_dir (str): temporary location to store the .mov + Updates: + instance - adds representation portion + """ + # Generate mov. + mov_path = os.path.join(staging_dir, "review.mov") + self.log.info(f"Generate mov review: {mov_path}") + args = [ + ffmpeg_path, + "-y", + "-i", source_files_pattern, + "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", + "-vframes", str(no_of_frames), + mov_path + ] + self.log.debug("mov args:: {}".format(args)) + _output = run_subprocess(args) + instance.data["representations"].append({ + "name": "mov", + "ext": "mov", + "files": os.path.basename(mov_path), + "stagingDir": staging_dir, + "frameStart": 1, + "frameEnd": no_of_frames, + "fps": fps, + "preview": True, + "tags": self.mov_options['tags'] + }) + + def _generate_thumbnail(self, ffmpeg_path, instance, source_files_pattern, + staging_dir): + """Generates a scaled down thumbnail and adds it as a representation. + + Args: + ffmpeg_path (str): path to ffmpeg + instance (Pyblish Instance) + source_files_pattern (str): name of source file + staging_dir (str): temporary location to store thumbnail + Updates: + instance - adds representation portion + """ # Generate thumbnail thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") self.log.info(f"Generate thumbnail {thumbnail_path}") @@ -93,50 +157,16 @@ thumbnail_path ] self.log.debug("thumbnail args:: {}".format(args)) - output = openpype.lib.run_subprocess(args) - + _output = run_subprocess(args) instance.data["representations"].append({ "name": "thumbnail", "ext": "jpg", + "outputName": "thumb", "files": os.path.basename(thumbnail_path), "stagingDir": staging_dir, - "tags": ["thumbnail"] + "tags": ["thumbnail", "delete"] }) - # Generate mov.
- mov_path = os.path.join(staging_dir, "review.mov") - self.log.info(f"Generate mov review: {mov_path}") - img_number = len(img_list) - args = [ - ffmpeg_path, - "-y", - "-i", source_files_pattern, - "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", - "-vframes", str(img_number), - mov_path - ] - self.log.debug("mov args:: {}".format(args)) - output = openpype.lib.run_subprocess(args) - self.log.debug(output) - instance.data["representations"].append({ - "name": "mov", - "ext": "mov", - "files": os.path.basename(mov_path), - "stagingDir": staging_dir, - "frameStart": 1, - "frameEnd": img_number, - "fps": fps, - "preview": True, - "tags": self.mov_options['tags'] - }) - - # Required for extract_review plugin (L222 onwards). - instance.data["frameStart"] = 1 - instance.data["frameEnd"] = img_number - instance.data["fps"] = 25 - - self.log.info(f"Extracted {instance} to {staging_dir}") - def _check_and_resize(self, processed_img_names, source_files_pattern, staging_dir): """Check if saved image could be used in ffmpeg. @@ -165,37 +195,12 @@ return source_files_pattern - def _get_image_path_from_instances(self, instance): - img_list = [] - - for instance in sorted(instance.context): - if instance.data["family"] != "image": - continue - - for rep in instance.data["representations"]: - img_path = os.path.join( - rep["stagingDir"], - rep["files"] - ) - img_list.append(img_path) - - return img_list - - def _copy_image_to_staging_dir(self, staging_dir, img_list): - copy_files = [] - for i, img_src in enumerate(img_list): - img_filename = self.output_seq_filename % i - img_dst = os.path.join(staging_dir, img_filename) - - self.log.debug( - "Copying file .. {} -> {}".format(img_src, img_dst) - ) - shutil.copy(img_src, img_dst) - copy_files.append(img_filename) - - return copy_files - def _get_layers_from_image_instances(self, instance): + """Collect all layers from the 'instance' context. + + Returns: + (list): list of PSItem + """ layers = [] for image_instance in instance.context: if image_instance.data["family"] != "image": @@ -207,7 +212,12 @@ return sorted(layers) - def _saves_flattened_layers(self, staging_dir, layers): + def _save_flatten_image(self, staging_dir, layers): + """Creates a flat image from 'layers' into 'staging_dir'. + + Returns: + (str): path to the new image + """ img_filename = self.output_seq_filename % 0 output_image_path = os.path.join(staging_dir, img_filename) stub = photoshop.stub() @@ -221,7 +231,13 @@ return img_filename - def _saves_sequences_layers(self, staging_dir, layers): + def _save_sequence_images(self, staging_dir, layers): + """Creates separate flat images from 'layers' into 'staging_dir'. + + Used as the source frames for a multi-frame .mov review.
+ Returns: + (list): paths to new images + """ stub = photoshop.stub() list_img_filename = [] diff --git a/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py b/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py index 03086f389f..aa900fec9f 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py @@ -1,11 +1,11 @@ -import openpype.api +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractSaveScene(openpype.api.Extractor): +class ExtractSaveScene(publish.Extractor): """Save scene before extraction.""" - order = openpype.api.Extractor.order - 0.49 + order = publish.Extractor.order - 0.49 label = "Extract Save Scene" hosts = ["photoshop"] families = ["workfile"] diff --git a/openpype/hosts/photoshop/plugins/publish/increment_workfile.py b/openpype/hosts/photoshop/plugins/publish/increment_workfile.py index 92132c393b..665dd67fc5 100644 --- a/openpype/hosts/photoshop/plugins/publish/increment_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/increment_workfile.py @@ -1,6 +1,6 @@ import os import pyblish.api -from openpype.action import get_errored_plugins_from_data +from openpype.pipeline.publish import get_errored_plugins_from_context from openpype.lib import version_up from openpype.hosts.photoshop import api as photoshop @@ -19,7 +19,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin): optional = True def process(self, instance): - errored_plugins = get_errored_plugins_from_data(instance.context) + errored_plugins = get_errored_plugins_from_context(instance.context) if errored_plugins: raise RuntimeError( "Skipping incrementing current file because publishing failed." diff --git a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py index b65f9d259f..2609f7a8cf 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidateContentsOrder from openpype.hosts.photoshop import api as photoshop @@ -45,7 +45,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): label = "Validate Instance Asset" hosts = ["photoshop"] actions = [ValidateInstanceAssetRepair] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): instance_asset = instance.data["asset"] diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index 8106d6ff16..0665aff9d0 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -1,10 +1,13 @@ import re import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + from openpype.hosts.photoshop import api as photoshop from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateNamingRepair(pyblish.api.Action): @@ -72,7 +75,7 @@ class ValidateNaming(pyblish.api.InstancePlugin): label = "Validate Naming" hosts = ["photoshop"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["image"] actions = 
[ValidateNamingRepair] diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py index 01f2323157..78e84729ce 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py @@ -1,7 +1,9 @@ import collections import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): @@ -11,7 +13,7 @@ label = "Validate Subset Uniqueness" hosts = ["photoshop"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["image"] def process(self, context): diff --git a/openpype/hosts/resolve/README.markdown b/openpype/hosts/resolve/README.markdown index 8c9f72fb0c..a8bb071e7e 100644 --- a/openpype/hosts/resolve/README.markdown +++ b/openpype/hosts/resolve/README.markdown @@ -1,22 +1,24 @@ -#### Basic setup +## Basic setup -- Install [latest DaVinci Resolve](https://sw.blackmagicdesign.com/DaVinciResolve/v16.2.8/DaVinci_Resolve_Studio_16.2.8_Windows.zip?Key-Pair-Id=APKAJTKA3ZJMJRQITVEA&Signature=EcFuwQFKHZIBu2zDj5LTCQaQDXcKOjhZY7Fs07WGw24xdDqfwuALOyKu+EVzDX2Tik0cWDunYyV0r7hzp+mHmczp9XP4YaQXHdyhD/2BGWDgiMsiTQbNkBgbfy5MsAMFY8FHCl724Rxm8ke1foWeUVyt/Cdkil+ay+9sL72yFhaSV16sncko1jCIlCZeMkHhbzqPwyRuqLGmxmp8ey9KgBhI3wGFFPN201VMaV+RHrpX+KAfaR6p6dwo3FrPbRHK9TvMI1RA/1lJ3fVtrkDW69LImIKAWmIxgcStUxR9/taqLOD66FNiflHd1tufHv3FBa9iYQsjb3VLMPx7OCwLyg==&Expires=1608308139) -- add absolute path to ffmpeg into openpype settings - ![image](https://user-images.githubusercontent.com/40640033/102630786-43294f00-414d-11eb-98de-f0ae51f62077.png) -- install Python 3.6 into `%LOCALAPPDATA%/Programs/Python/Python36` (only respected path by Resolve) -- install OpenTimelineIO for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/`. I was building it on Win10 machine with Visual Studio Community 2019 and +- Currently supported versions go up to v18 +- install Python 3.6.2 (latest tested with v17) or up to 3.9.13 (latest tested with v18) +- pip install PySide2: + - Python 3.9.*: open terminal and go to python.exe directory, then `python -m pip install PySide2` +- pip install OpenTimelineIO: + - Python 3.9.*: open terminal and go to python.exe directory, then `python -m pip install OpenTimelineIO` + - Python 3.6: open terminal and go to python.exe directory, then `python -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `./Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `./Lib/site-packages/opentimelineio/`. I built it on a Win10 machine with Visual Studio Community 2019 and ![image](https://user-images.githubusercontent.com/40640033/102792588-ffcb1c80-43a8-11eb-9c6b-bf2114ed578e.png) with CMake installed in PATH.
-- install PySide2 for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install PySide2` - make sure Resolve Fusion (Fusion Tab/menu/Fusion/Fusion Settings) is set to Python 3.6 ![image](https://user-images.githubusercontent.com/40640033/102631545-280b0f00-414e-11eb-89fc-98ac268d209d.png) +- Open OpenPype **Tray/Admin/Studio settings** > `applications/resolve/environment` and add the platform-specific Python3 path to `RESOLVE_PYTHON3_HOME`. -#### Editorial setup +## Editorial setup This is how it looks on my testing project timeline ![image](https://user-images.githubusercontent.com/40640033/102637638-96ec6600-4156-11eb-9656-6e8e3ce4baf8.png) Notice I had renamed the tracks to `main` (holding metadata markers) and `review`, used for generating review data with ffmpeg conversion to a jpg sequence. -1. you need to start OpenPype menu from Resolve/EditTab/Menu/Workspace/Scripts/**__OpenPype_Menu__** +1. you need to start OpenPype menu from Resolve/EditTab/Menu/Workspace/Scripts/Comp/**__OpenPype_Menu__** 2. then select any clips in `main` track and change their color to `Chocolate` 3. in OpenPype Menu select `Create` 4. in Creator select `Create Publishable Clip [New]` (temporary name) diff --git a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt b/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt deleted file mode 100644 index 139b66bc24..0000000000 --- a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt +++ /dev/null @@ -1,189 +0,0 @@ -Updated as of 08 March 2019 - --------------------------- -In this package, you will find a brief introduction to the Scripting API for DaVinci Resolve Studio. Apart from this README.txt file, this package contains folders containing the basic import modules for scripting access (DaVinciResolve.py) and some representative examples. - -Overview -------- - -As with Blackmagic Design Fusion scripts, user scripts written in Lua and Python programming languages are supported. By default, scripts can be invoked from the Console window in the Fusion page, or via command line. This permission can be changed in Resolve Preferences, to be only from Console, or to be invoked from the local network. Please be aware of the security implications when allowing scripting access from outside of the Resolve application. - - -Using a script -------------- -DaVinci Resolve needs to be running for a script to be invoked. - -For a Resolve script to be executed from an external folder, the script needs to know of the API location.
-You may need to set the these environment variables to allow for your Python installation to pick up the appropriate dependencies as shown below: - - Mac OS X: - RESOLVE_SCRIPT_API="/Library/Application Support/Blackmagic Design/DaVinci Resolve/Developer/Scripting/" - RESOLVE_SCRIPT_LIB="/Applications/DaVinci Resolve/DaVinci Resolve.app/Contents/Libraries/Fusion/fusionscript.so" - PYTHONPATH="$PYTHONPATH:$RESOLVE_SCRIPT_API/Modules/" - - Windows: - RESOLVE_SCRIPT_API="%PROGRAMDATA%\\Blackmagic Design\\DaVinci Resolve\\Support\\Developer\\Scripting\\" - RESOLVE_SCRIPT_LIB="C:\\Program Files\\Blackmagic Design\\DaVinci Resolve\\fusionscript.dll" - PYTHONPATH="%PYTHONPATH%;%RESOLVE_SCRIPT_API%\\Modules\\" - - Linux: - RESOLVE_SCRIPT_API="/opt/resolve/Developer/Scripting/" - RESOLVE_SCRIPT_LIB="/opt/resolve/libs/Fusion/fusionscript.so" - PYTHONPATH="$PYTHONPATH:$RESOLVE_SCRIPT_API/Modules/" - (Note: For standard ISO Linux installations, the path above may need to be modified to refer to /home/resolve instead of /opt/resolve) - -As with Fusion scripts, Resolve scripts can also be invoked via the menu and the Console. - -On startup, DaVinci Resolve scans the Utility Scripts directory and enumerates the scripts found in the Script application menu. Placing your script in this folder and invoking it from this menu is the easiest way to use scripts. The Utility Scripts folder is located in: - Mac OS X: /Library/Application Support/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp/ - Windows: %APPDATA%\Blackmagic Design\DaVinci Resolve\Fusion\Scripts\Comp\ - Linux: /opt/resolve/Fusion/Scripts/Comp/ (or /home/resolve/Fusion/Scripts/Comp/ depending on installation) - -The interactive Console window allows for an easy way to execute simple scripting commands, to query or modify properties, and to test scripts. The console accepts commands in Python 2.7, Python 3.6 and Lua and evaluates and executes them immediately. For more information on how to use the Console, please refer to the DaVinci Resolve User Manual. - -This example Python script creates a simple project: - #!/usr/bin/env python - import DaVinciResolveScript as dvr_script - resolve = dvr_script.scriptapp("Resolve") - fusion = resolve.Fusion() - projectManager = resolve.GetProjectManager() - projectManager.CreateProject("Hello World") - -The resolve object is the fundamental starting point for scripting via Resolve. As a native object, it can be inspected for further scriptable properties - using table iteration and `getmetatable` in Lua and dir, help etc in Python (among other methods). A notable scriptable object above is fusion - it allows access to all existing Fusion scripting functionality. - -Running DaVinci Resolve in headless mode ----------------------------------------- - -DaVinci Resolve can be launched in a headless mode without the user interface using the -nogui command line option. When DaVinci Resolve is launched using this option, the user interface is disabled. However, the various scripting APIs will continue to work as expected. - -Basic Resolve API ------------------ - -Some commonly used API functions are described below (*). As with the resolve object, each object is inspectable for properties and functions. - - -Resolve - Fusion() --> Fusion # Returns the Fusion object. Starting point for Fusion scripts. - GetMediaStorage() --> MediaStorage # Returns media storage object to query and act on media locations. - GetProjectManager() --> ProjectManager # Returns project manager object for currently open database. 
- OpenPage(pageName) --> None # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "edit", "fusion", "color", "fairlight", "deliver"). -ProjectManager - CreateProject(projectName) --> Project # Creates and returns a project if projectName (text) is unique, and None if it is not. - LoadProject(projectName) --> Project # Loads and returns the project with name = projectName (text) if there is a match found, and None if there is no matching Project. - GetCurrentProject() --> Project # Returns the currently loaded Resolve project. - SaveProject() --> Bool # Saves the currently loaded project with its own name. Returns True if successful. - CreateFolder(folderName) --> Bool # Creates a folder if folderName (text) is unique. - GetProjectsInCurrentFolder() --> [project names...] # Returns an array of project names in current folder. - GetFoldersInCurrentFolder() --> [folder names...] # Returns an array of folder names in current folder. - GotoRootFolder() --> Bool # Opens root folder in database. - GotoParentFolder() --> Bool # Opens parent folder of current folder in database if current folder has parent. - OpenFolder(folderName) --> Bool # Opens folder under given name. - ImportProject(filePath) --> Bool # Imports a project under given file path. Returns true in case of success. - ExportProject(projectName, filePath) --> Bool # Exports a project based on given name into provided file path. Returns true in case of success. - RestoreProject(filePath) --> Bool # Restores a project under given backup file path. Returns true in case of success. -Project - GetMediaPool() --> MediaPool # Returns the Media Pool object. - GetTimelineCount() --> int # Returns the number of timelines currently present in the project. - GetTimelineByIndex(idx) --> Timeline # Returns timeline at the given index, 1 <= idx <= project.GetTimelineCount() - GetCurrentTimeline() --> Timeline # Returns the currently loaded timeline. - SetCurrentTimeline(timeline) --> Bool # Sets given timeline as current timeline for the project. Returns True if successful. - GetName() --> string # Returns project name. - SetName(projectName) --> Bool # Sets project name if given projectname (text) is unique. - GetPresets() --> [presets...] # Returns a table of presets and their information. - SetPreset(presetName) --> Bool # Sets preset by given presetName (string) into project. - GetRenderJobs() --> [render jobs...] # Returns a table of render jobs and their information. - GetRenderPresets() --> [presets...] # Returns a table of render presets and their information. - StartRendering(index1, index2, ...) --> Bool # Starts rendering for given render jobs based on their indices. If no parameter is given rendering would start for all render jobs. - StartRendering([idxs...]) --> Bool # Starts rendering for given render jobs based on their indices. If no parameter is given rendering would start for all render jobs. - StopRendering() --> None # Stops rendering for all render jobs. - IsRenderingInProgress() --> Bool # Returns true is rendering is in progress. - AddRenderJob() --> Bool # Adds render job to render queue. - DeleteRenderJobByIndex(idx) --> Bool # Deletes render job based on given job index (int). - DeleteAllRenderJobs() --> Bool # Deletes all render jobs. - LoadRenderPreset(presetName) --> Bool # Sets a preset as current preset for rendering if presetName (text) exists. - SaveAsNewRenderPreset(presetName) --> Bool # Creates a new render preset by given name if presetName(text) is unique. 
- SetRenderSettings([settings map]) --> Bool # Sets given settings for rendering. Settings map is a map, keys of map are: "SelectAllFrames", "MarkIn", "MarkOut", "TargetDir", "CustomName". - GetRenderJobStatus(idx) --> [status info] # Returns job status and completion rendering percentage of the job by given job index (int). - GetSetting(settingName) --> string # Returns setting value by given settingName (string) if the setting exist. With empty settingName the function returns a full list of settings. - SetSetting(settingName, settingValue) --> Bool # Sets project setting base on given name (string) and value (string). - GetRenderFormats() --> [render formats...]# Returns a list of available render formats. - GetRenderCodecs(renderFormat) --> [render codecs...] # Returns a list of available codecs for given render format (string). - GetCurrentRenderFormatAndCodec() --> [format, codec] # Returns currently selected render format and render codec. - SetCurrentRenderFormatAndCodec(format, codec) --> Bool # Sets given render format (string) and render codec (string) as options for rendering. -MediaStorage - GetMountedVolumes() --> [paths...] # Returns an array of folder paths corresponding to mounted volumes displayed in Resolve’s Media Storage. - GetSubFolders(folderPath) --> [paths...] # Returns an array of folder paths in the given absolute folder path. - GetFiles(folderPath) --> [paths...] # Returns an array of media and file listings in the given absolute folder path. Note that media listings may be logically consolidated entries. - RevealInStorage(path) --> None # Expands and displays a given file/folder path in Resolve’s Media Storage. - AddItemsToMediaPool(item1, item2, ...) --> [clips...] # Adds specified file/folder paths from Media Store into current Media Pool folder. Input is one or more file/folder paths. - AddItemsToMediaPool([items...]) --> [clips...] # Adds specified file/folder paths from Media Store into current Media Pool folder. Input is an array of file/folder paths. -MediaPool - GetRootFolder() --> Folder # Returns the root Folder of Media Pool - AddSubFolder(folder, name) --> Folder # Adds a new subfolder under specified Folder object with the given name. - CreateEmptyTimeline(name) --> Timeline # Adds a new timeline with given name. - AppendToTimeline(clip1, clip2...) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - AppendToTimeline([clips]) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - CreateTimelineFromClips(name, clip1, clip2, ...)--> Timeline # Creates a new timeline with specified name, and appends the specified MediaPoolItem objects. - CreateTimelineFromClips(name, [clips]) --> Timeline # Creates a new timeline with specified name, and appends the specified MediaPoolItem objects. - ImportTimelineFromFile(filePath) --> Timeline # Creates timeline based on parameters within given file. - GetCurrentFolder() --> Folder # Returns currently selected Folder. - SetCurrentFolder(Folder) --> Bool # Sets current folder by given Folder. -Folder - GetClips() --> [clips...] # Returns a list of clips (items) within the folder. - GetName() --> string # Returns user-defined name of the folder. - GetSubFolders() --> [folders...] # Returns a list of subfolders in the folder. -MediaPoolItem - GetMetadata(metadataType) --> [[types],[values]] # Returns a value of metadataType. If parameter is not specified returns all set metadata parameters. 
- SetMetadata(metadataType, metadataValue) --> Bool # Sets metadata by given type and value. Returns True if successful. - GetMediaId() --> string # Returns a unique ID name related to MediaPoolItem. - AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information. - GetMarkers() --> [markers...] # Returns a list of all markers and their information. - AddFlag(color) --> Bool # Adds a flag with given color (text). - GetFlags() --> [colors...] # Returns a list of flag colors assigned to the item. - GetClipColor() --> string # Returns an item color as a string. - GetClipProperty(propertyName) --> [[types],[values]] # Returns property value related to the item based on given propertyName (string). if propertyName is empty then it returns a full list of properties. - SetClipProperty(propertyName, propertyValue) --> Bool # Sets into given propertyName (string) propertyValue (string). -Timeline - GetName() --> string # Returns user-defined name of the timeline. - SetName(timelineName) --> Bool # Sets timeline name is timelineName (text) is unique. - GetStartFrame() --> int # Returns frame number at the start of timeline. - GetEndFrame() --> int # Returns frame number at the end of timeline. - GetTrackCount(trackType) --> int # Returns a number of track based on specified track type ("audio", "video" or "subtitle"). - GetItemsInTrack(trackType, index) --> [items...] # Returns an array of Timeline items on the video or audio track (based on trackType) at specified index. 1 <= index <= GetTrackCount(trackType). - AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information. - GetMarkers() --> [markers...] # Returns a list of all markers and their information. - ApplyGradeFromDRX(path, gradeMode, item1, item2, ...)--> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". - ApplyGradeFromDRX(path, gradeMode, [items]) --> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". - GetCurrentTimecode() --> string # Returns a string representing a timecode for current position of the timeline, while on Cut, Edit, Color and Deliver page. - GetCurrentVideoItem() --> item # Returns current video timeline item. - GetCurrentClipThumbnailImage() --> [width, height, format, data] # Returns raw thumbnail image data (This image data is encoded in base 64 format and the image format is RGB 8 bit) for the current media in the Color Page in the format of dictionary (in Python) and table (in Lua). Information return are "width", "height", "format" and "data". Example is provided in 6_get_current_media_thumbnail.py in Example folder. -TimelineItem - GetName() --> string # Returns a name of the item. - GetDuration() --> int # Returns a duration of item. - GetEnd() --> int # Returns a position of end frame. - GetFusionCompCount() --> int # Returns the number of Fusion compositions associated with the timeline item. - GetFusionCompByIndex(compIndex) --> fusionComp # Returns Fusion composition object based on given index. 1 <= compIndex <= timelineItem.GetFusionCompCount() - GetFusionCompNames() --> [names...] # Returns a list of Fusion composition names associated with the timeline item. 
- GetFusionCompByName(compName) --> fusionComp # Returns Fusion composition object based on given name. - GetLeftOffset() --> int # Returns a maximum extension by frame for clip from left side. - GetRightOffset() --> int # Returns a maximum extension by frame for clip from right side. - GetStart() --> int # Returns a position of first frame. - AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information. - GetMarkers() --> [markers...] # Returns a list of all markers and their information. - GetFlags() --> [colors...] # Returns a list of flag colors assigned to the item. - GetClipColor() --> string # Returns an item color as a string. - AddFusionComp() --> fusionComp # Adds a new Fusion composition associated with the timeline item. - ImportFusionComp(path) --> fusionComp # Imports Fusion composition from given file path by creating and adding a new composition for the item. - ExportFusionComp(path, compIndex) --> Bool # Exports Fusion composition based on given index into provided file name path. - DeleteFusionCompByName(compName) --> Bool # Deletes Fusion composition by provided name. - LoadFusionCompByName(compName) --> fusionComp # Loads Fusion composition by provided name and sets it as active composition. - RenameFusionCompByName(oldName, newName) --> Bool # Renames Fusion composition by provided name with new given name. - AddVersion(versionName, versionType) --> Bool # Adds a new Version associated with the timeline item. versionType: 0 - local, 1 - remote. - DeleteVersionByName(versionName, versionType) --> Bool # Deletes Version by provided name. versionType: 0 - local, 1 - remote. - LoadVersionByName(versionName, versionType) --> Bool # Loads Version by provided name and sets it as active Version. versionType: 0 - local, 1 - remote. - RenameVersionByName(oldName, newName, versionType)--> Bool # Renames Version by provided name with new given name. versionType: 0 - local, 1 - remote. - GetMediaPoolItem() --> MediaPoolItem # Returns a corresponding to the timeline item media pool item if it exists. - GetVersionNames(versionType) --> [strings...] # Returns a list of version names by provided versionType: 0 - local, 1 - remote. - GetStereoConvergenceValues() --> [offset, value] # Returns a table of keyframe offsets and respective convergence values - GetStereoLeftFloatingWindowParams() --> [offset, value] # For the LEFT eye -> returns a table of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values - GetStereoRightFloatingWindowParams() --> [offset, value] # For the RIGHT eye -> returns a table of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values diff --git a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt b/openpype/hosts/resolve/RESOLVE_API_v18.0.4.txt similarity index 70% rename from openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt rename to openpype/hosts/resolve/RESOLVE_API_v18.0.4.txt index f1b8b81a71..98597a12cb 100644 --- a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt +++ b/openpype/hosts/resolve/RESOLVE_API_v18.0.4.txt @@ -1,5 +1,5 @@ -Updated as of 20 October 2020 ------------------------------ +Updated as of 9 May 2022 +---------------------------- In this package, you will find a brief introduction to the Scripting API for DaVinci Resolve Studio. 
Apart from this README.txt file, this package contains folders containing the basic import modules for scripting access (DaVinciResolve.py) and some representative examples. @@ -89,12 +89,25 @@ Resolve Fusion() --> Fusion # Returns the Fusion object. Starting point for Fusion scripts. GetMediaStorage() --> MediaStorage # Returns the media storage object to query and act on media locations. GetProjectManager() --> ProjectManager # Returns the project manager object for currently open database. - OpenPage(pageName) --> None # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "cut", "edit", "fusion", "color", "fairlight", "deliver"). + OpenPage(pageName) --> Bool # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "cut", "edit", "fusion", "color", "fairlight", "deliver"). + GetCurrentPage() --> String # Returns the page currently displayed in the main window. Returned value can be one of ("media", "cut", "edit", "fusion", "color", "fairlight", "deliver", None). GetProductName() --> string # Returns product name. GetVersion() --> [version fields] # Returns list of product version fields in [major, minor, patch, build, suffix] format. GetVersionString() --> string # Returns product version in "major.minor.patch[suffix].build" format. + LoadLayoutPreset(presetName) --> Bool # Loads UI layout from saved preset named 'presetName'. + UpdateLayoutPreset(presetName) --> Bool # Overwrites preset named 'presetName' with current UI layout. + ExportLayoutPreset(presetName, presetFilePath) --> Bool # Exports preset named 'presetName' to path 'presetFilePath'. + DeleteLayoutPreset(presetName) --> Bool # Deletes preset named 'presetName'. + SaveLayoutPreset(presetName) --> Bool # Saves current UI layout as a preset named 'presetName'. + ImportLayoutPreset(presetFilePath, presetName) --> Bool # Imports preset from path 'presetFilePath'. The optional argument 'presetName' specifies how the preset shall be named. If not specified, the preset is named based on the filename. + Quit() --> None # Quits the Resolve App. ProjectManager + ArchiveProject(projectName, + filePath, + isArchiveSrcMedia=True, + isArchiveRenderCache=True, + isArchiveProxyMedia=False) --> Bool # Archives project to provided file path with the configuration as provided by the optional arguments CreateProject(projectName) --> Project # Creates and returns a project if projectName (string) is unique, and None if it is not. DeleteProject(projectName) --> Bool # Delete project in the current folder if not currently loaded LoadProject(projectName) --> Project # Loads and returns the project with name = projectName (string) if there is a match found, and None if there is no matching Project. @@ -109,9 +122,9 @@ ProjectManager GotoParentFolder() --> Bool # Opens parent folder of current folder in database if current folder has parent. GetCurrentFolder() --> string # Returns the current folder name. OpenFolder(folderName) --> Bool # Opens folder under given name. - ImportProject(filePath) --> Bool # Imports a project from the file path provided. Returns True if successful. + ImportProject(filePath, projectName=None) --> Bool # Imports a project from the file path provided with given project name, if any. Returns True if successful. ExportProject(projectName, filePath, withStillsAndLUTs=True) --> Bool # Exports project to provided file path, including stills and LUTs if withStillsAndLUTs is True (enabled by default). Returns True in case of success. 
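A minimal Python sketch of the ProjectManager load/export calls above (a sketch only: it assumes Resolve is running with scripting enabled, and "MyProject" and the .drp path are placeholder values):

    import DaVinciResolveScript as dvr_script  # module shipped in Resolve's Scripting/Modules

    resolve = dvr_script.scriptapp("Resolve")  # attach to the running Resolve instance
    pm = resolve.GetProjectManager()
    if pm.LoadProject("MyProject"):            # returns None if no project by that name exists
        # export the loaded project, stills and LUTs included (the default)
        pm.ExportProject("MyProject", "/tmp/MyProject.drp")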
- RestoreProject(filePath) --> Bool # Restores a project from the file path provided. Returns True if successful. + RestoreProject(filePath, projectName=None) --> Bool # Restores a project from the file path provided with given project name, if any. Returns True if successful. GetCurrentDatabase() --> {dbInfo} # Returns a dictionary (with keys 'DbType', 'DbName' and optional 'IpAddress') corresponding to the current database connection GetDatabaseList() --> [{dbInfo}] # Returns a list of dictionary items (with keys 'DbType', 'DbName' and optional 'IpAddress') corresponding to all the databases added to Resolve SetCurrentDatabase({dbInfo}) --> Bool # Switches current database connection to the database specified by the keys below, and closes any open project. @@ -125,8 +138,9 @@ Project GetTimelineByIndex(idx) --> Timeline # Returns timeline at the given index, 1 <= idx <= project.GetTimelineCount() GetCurrentTimeline() --> Timeline # Returns the currently loaded timeline. SetCurrentTimeline(timeline) --> Bool # Sets given timeline as current timeline for the project. Returns True if successful. + GetGallery() --> Gallery # Returns the Gallery object. GetName() --> string # Returns project name. - SetName(projectName) --> Bool # Sets project name if given projectname (string) is unique. + SetName(projectName) --> Bool # Sets project name if given projectName (string) is unique. GetPresetList() --> [presets...] # Returns a list of presets and their information. SetPreset(presetName) --> Bool # Sets preset by given presetName (string) into project. AddRenderJob() --> string # Adds a render job based on current render settings to the render queue. Returns a unique job id (string) for the new render job. @@ -144,27 +158,7 @@ Project LoadRenderPreset(presetName) --> Bool # Sets a preset as current preset for rendering if presetName (string) exists. SaveAsNewRenderPreset(presetName) --> Bool # Creates new render preset by given name if presetName(string) is unique. SetRenderSettings({settings}) --> Bool # Sets given settings for rendering. Settings is a dict, with support for the keys: - # "SelectAllFrames": Bool - # "MarkIn": int - # "MarkOut": int - # "TargetDir": string - # "CustomName": string - # "UniqueFilenameStyle": 0 - Prefix, 1 - Suffix. - # "ExportVideo": Bool - # "ExportAudio": Bool - # "FormatWidth": int - # "FormatHeight": int - # "FrameRate": float (examples: 23.976, 24) - # "PixelAspectRatio": string (for SD resolution: "16_9" or "4_3") (other resolutions: "square" or "cinemascope") - # "VideoQuality" possible values for current codec (if applicable): - # 0 (int) - will set quality to automatic - # [1 -> MAX] (int) - will set input bit rate - # ["Least", "Low", "Medium", "High", "Best"] (String) - will set input quality level - # "AudioCodec": string (example: "aac") - # "AudioBitDepth": int - # "AudioSampleRate": int - # "ColorSpaceTag" : string (example: "Same as Project", "AstroDesign") - # "GammaTag" : string (example: "Same as Project", "ACEScct") + # Refer to the "Looking up Render Settings" section for information on supported settings GetRenderJobStatus(jobId) --> {status info} # Returns a dict with job status and completion percentage of the job by given jobId (string). GetSetting(settingName) --> string # Returns value of project setting (indicated by settingName, string). Check the section below for more information. SetSetting(settingName, settingValue) --> Bool # Sets the project setting (indicated by settingName, string) to the value (settingValue, string).
Check the section below for more information. @@ -176,12 +170,13 @@ Project SetCurrentRenderMode(renderMode) --> Bool # Sets the render mode. Specify renderMode = 0 for Individual clips, 1 for Single clip. GetRenderResolutions(format, codec) --> [{Resolution}] # Returns list of resolutions applicable for the given render format (string) and render codec (string). Returns full list of resolutions if no argument is provided. Each element in the list is a dictionary with 2 keys "Width" and "Height". RefreshLUTList() --> Bool # Refreshes LUT List + GetUniqueId() --> string # Returns a unique ID for the project item MediaStorage GetMountedVolumeList() --> [paths...] # Returns list of folder paths corresponding to mounted volumes displayed in Resolve’s Media Storage. GetSubFolderList(folderPath) --> [paths...] # Returns list of folder paths in the given absolute folder path. GetFileList(folderPath) --> [paths...] # Returns list of media and file listings in the given absolute folder path. Note that media listings may be logically consolidated entries. - RevealInStorage(path) --> None # Expands and displays given file/folder path in Resolve’s Media Storage. + RevealInStorage(path) --> Bool # Expands and displays given file/folder path in Resolve’s Media Storage. AddItemListToMediaPool(item1, item2, ...) --> [clips...] # Adds specified file/folder paths from Media Storage into current Media Pool folder. Input is one or more file/folder paths. Returns a list of the MediaPoolItems created. AddItemListToMediaPool([items...]) --> [clips...] # Adds specified file/folder paths from Media Storage into current Media Pool folder. Input is an array of file/folder paths. Returns a list of the MediaPoolItems created. AddClipMattesToMediaPool(MediaPoolItem, [paths], stereoEye) --> Bool # Adds specified media files as mattes for the specified MediaPoolItem. StereoEye is an optional argument for specifying which eye to add the matte to for stereo clips ("left" or "right"). Returns True if successful. @@ -190,10 +185,11 @@ MediaStorage MediaPool GetRootFolder() --> Folder # Returns root Folder of Media Pool AddSubFolder(folder, name) --> Folder # Adds new subfolder under specified Folder object with the given name. + RefreshFolders() --> Bool # Updates the folders in collaboration mode CreateEmptyTimeline(name) --> Timeline # Adds new timeline with given name. - AppendToTimeline(clip1, clip2, ...) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - AppendToTimeline([clips]) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - AppendToTimeline([{clipInfo}, ...]) --> Bool # Appends list of clipInfos specified as dict of "mediaPoolItem", "startFrame" (int), "endFrame" (int). + AppendToTimeline(clip1, clip2, ...) --> [TimelineItem] # Appends specified MediaPoolItem objects in the current timeline. Returns the list of appended timelineItems. + AppendToTimeline([clips]) --> [TimelineItem] # Appends specified MediaPoolItem objects in the current timeline. Returns the list of appended timelineItems. + AppendToTimeline([{clipInfo}, ...]) --> [TimelineItem] # Appends list of clipInfos specified as dict of "mediaPoolItem", "startFrame" (int), "endFrame" (int), (optional) "mediaType" (int; 1 - Video only, 2 - Audio only). Returns the list of appended timelineItems. CreateTimelineFromClips(name, clip1, clip2,...) --> Timeline # Creates new timeline with specified name, and appends the specified MediaPoolItem objects. 
 CreateTimelineFromClips(name, [clips]) --> Timeline # Creates new timeline with specified name, and appends the specified MediaPoolItem objects. CreateTimelineFromClips(name, [{clipInfo}]) --> Timeline # Creates new timeline with specified name, appending the list of clipInfos specified as a dict of "mediaPoolItem", "startFrame" (int), "endFrame" (int). @@ -202,6 +198,8 @@ MediaPool # "importSourceClips": Bool, specifies whether source clips should be imported, True by default # "sourceClipsPath": string, specifies a filesystem path to search for source clips if the media is inaccessible in their original path and if "importSourceClips" is True # "sourceClipsFolders": List of Media Pool folder objects to search for source clips if the media is not present in current folder and if "importSourceClips" is False + # "interlaceProcessing": Bool, specifies whether to enable interlace processing on the imported timeline being created. Valid only for AAF import. + DeleteTimelines([timeline]) --> Bool # Deletes specified timelines in the media pool. GetCurrentFolder() --> Folder # Returns currently selected Folder. SetCurrentFolder(Folder) --> Bool # Sets current folder by given Folder. DeleteClips([clips]) --> Bool # Deletes specified clips or timeline mattes in the media pool @@ -214,19 +212,26 @@ MediaPool RelinkClips([MediaPoolItem], folderPath) --> Bool # Update the folder location of specified media pool clips with the specified folder path. UnlinkClips([MediaPoolItem]) --> Bool # Unlink specified media pool clips. ImportMedia([items...]) --> [MediaPoolItems] # Imports specified file/folder paths into current Media Pool folder. Input is an array of file/folder paths. Returns a list of the MediaPoolItems created. + ImportMedia([{clipInfo}]) --> [MediaPoolItems] # Imports file path(s) into current Media Pool folder as specified in list of clipInfo dict. Returns a list of the MediaPoolItems created. + # Each clipInfo gets imported as one MediaPoolItem unless 'Show Individual Frames' is turned on. + # Example: ImportMedia([{"FilePath":"file_%03d.dpx", "StartIndex":1, "EndIndex":100}]) would import clip "file_[001-100].dpx". ExportMetadata(fileName, [clips]) --> Bool # Exports metadata of specified clips to 'fileName' in CSV format. # If no clips are specified, all clips from media pool will be used. + GetUniqueId() --> string # Returns a unique ID for the media pool Folder GetClipList() --> [clips...] # Returns a list of clips (items) within the folder. GetName() --> string # Returns the media folder name. GetSubFolderList() --> [folders...] # Returns a list of subfolders in the folder. + GetIsFolderStale() --> bool # Returns true if folder is stale in collaboration mode, false otherwise + GetUniqueId() --> string # Returns a unique ID for the media pool folder MediaPoolItem GetName() --> string # Returns the clip name. GetMetadata(metadataType=None) --> string|dict # Returns the metadata value for the key 'metadataType'. # If no argument is specified, a dict of all set metadata properties is returned. SetMetadata(metadataType, metadataValue) --> Bool # Sets the given metadata to metadataValue (string). Returns True if successful. + SetMetadata({metadata}) --> Bool # Sets the item metadata with specified 'metadata' dict. Returns True if successful. GetMediaId() --> string # Returns the unique ID for the MediaPoolItem. AddMarker(frameId, color, name, note, duration, --> Bool # Creates a new marker at given frameId position and with given marker information.
 'customData' is optional and helps to attach user specific data to the marker. customData) @@ -248,15 +253,18 @@ MediaPoolItem GetClipProperty(propertyName=None) --> string|dict # Returns the property value for the key 'propertyName'. # If no argument is specified, a dict of all clip properties is returned. Check the section below for more information. SetClipProperty(propertyName, propertyValue) --> Bool # Sets the given property to propertyValue (string). Check the section below for more information. - LinkProxyMedia(propertyName) --> Bool # Links proxy media (absolute path) with the current clip. + LinkProxyMedia(proxyMediaFilePath) --> Bool # Links proxy media located at path specified by arg 'proxyMediaFilePath' with the current clip. 'proxyMediaFilePath' should be an absolute clip path. UnlinkProxyMedia() --> Bool # Unlinks any proxy media associated with clip. ReplaceClip(filePath) --> Bool # Replaces the underlying asset and metadata of MediaPoolItem with the specified absolute clip path. + GetUniqueId() --> string # Returns a unique ID for the media pool item Timeline GetName() --> string # Returns the timeline name. SetName(timelineName) --> Bool # Sets the timeline name if timelineName (string) is unique. Returns True if successful. GetStartFrame() --> int # Returns the frame number at the start of timeline. GetEndFrame() --> int # Returns the frame number at the end of timeline. + SetStartTimecode(timecode) --> Bool # Sets the start timecode of the timeline to the string 'timecode'. Returns true when the change is successful, false otherwise. + GetStartTimecode() --> string # Returns the start timecode for the timeline. GetTrackCount(trackType) --> int # Returns the number of tracks for the given track type ("audio", "video" or "subtitle"). GetItemListInTrack(trackType, index) --> [items...] # Returns a list of timeline items on that track (based on trackType and index). 1 <= index <= GetTrackCount(trackType). AddMarker(frameId, color, name, note, duration, --> Bool # Creates a new marker at given frameId position and with given marker information. 'customData' is optional and helps to attach user specific data to the marker. @@ -271,7 +279,8 @@ Timeline DeleteMarkerByCustomData(customData) --> Bool # Delete first matching marker with specified customData. ApplyGradeFromDRX(path, gradeMode, item1, item2, ...)--> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". ApplyGradeFromDRX(path, gradeMode, [items]) --> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". - GetCurrentTimecode() --> string # Returns a string timecode representation for the current playhead position, while on Cut, Edit, Color and Deliver pages. + GetCurrentTimecode() --> string # Returns a string timecode representation for the current playhead position, while on Cut, Edit, Color, Fairlight and Deliver pages. + SetCurrentTimecode(timecode) --> Bool # Sets current playhead position from input timecode for Cut, Edit, Color, Fairlight and Deliver pages. GetCurrentVideoItem() --> item # Returns the current video timeline item.
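A quick sketch of the playhead round-trip via GetCurrentTimecode/SetCurrentTimecode above (assumes `resolve` was obtained as in the earlier sketch and a timeline is open on one of the supported pages; the timecode strings are illustrative):

    project = resolve.GetProjectManager().GetCurrentProject()
    timeline = project.GetCurrentTimeline()
    print(timeline.GetCurrentTimecode())        # e.g. "01:00:10:05"
    timeline.SetCurrentTimecode("01:00:00:00")  # move the playhead back to the start hour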
 GetCurrentClipThumbnailImage() --> {thumbnailData} # Returns a dict (keys "width", "height", "format" and "data") with data containing raw thumbnail image data (RGB 8-bit image data encoded in base64 format) for current media in the Color Page. # An example of how to retrieve and interpret thumbnails is provided in 6_get_current_media_thumbnail.py in the Examples folder. @@ -280,37 +289,30 @@ Timeline DuplicateTimeline(timelineName) --> timeline # Duplicates the timeline and returns the created timeline, with the (optional) timelineName, on success. CreateCompoundClip([timelineItems], {clipInfo}) --> timelineItem # Creates a compound clip of input timeline items with an optional clipInfo map: {"startTimecode" : "00:00:00:00", "name" : "Compound Clip 1"}. It returns the created timeline item. CreateFusionClip([timelineItems]) --> timelineItem # Creates a Fusion clip of input timeline items. It returns the created timeline item. + ImportIntoTimeline(filePath, {importOptions}) --> Bool # Imports timeline items from an AAF file and optional importOptions dict into the timeline, with support for the keys: + # "autoImportSourceClipsIntoMediaPool": Bool, specifies if source clips should be imported into media pool, True by default + # "ignoreFileExtensionsWhenMatching": Bool, specifies if file extensions should be ignored when matching, False by default + # "linkToSourceCameraFiles": Bool, specifies if link to source camera files should be enabled, False by default + # "useSizingInfo": Bool, specifies if sizing information should be used, False by default + # "importMultiChannelAudioTracksAsLinkedGroups": Bool, specifies if multi-channel audio tracks should be imported as linked groups, False by default + # "insertAdditionalTracks": Bool, specifies if additional tracks should be inserted, True by default + # "insertWithOffset": string, specifies insert with offset value in timecode format - defaults to "00:00:00:00", applicable if "insertAdditionalTracks" is False + # "sourceClipsPath": string, specifies a filesystem path to search for source clips if the media is inaccessible in their original path and if "ignoreFileExtensionsWhenMatching" is True + # "sourceClipsFolders": list of Media Pool folder objects to search for source clips if the media is not present in current folder + Export(fileName, exportType, exportSubtype) --> Bool # Exports timeline to 'fileName' as per input exportType & exportSubtype format. - # exportType can be one of the following constants: - # resolve.EXPORT_AAF - # resolve.EXPORT_DRT - # resolve.EXPORT_EDL - # resolve.EXPORT_FCP_7_XML - # resolve.EXPORT_FCPXML_1_3 - # resolve.EXPORT_FCPXML_1_4 - # resolve.EXPORT_FCPXML_1_5 - # resolve.EXPORT_FCPXML_1_6 - # resolve.EXPORT_FCPXML_1_7 - # resolve.EXPORT_FCPXML_1_8 - # resolve.EXPORT_HDR_10_PROFILE_A - # resolve.EXPORT_HDR_10_PROFILE_B - # resolve.EXPORT_TEXT_CSV - # resolve.EXPORT_TEXT_TAB - # resolve.EXPORT_DOLBY_VISION_VER_2_9 - # resolve.EXPORT_DOLBY_VISION_VER_4_0 - # exportSubtype can be one of the following enums: - # resolve.EXPORT_NONE - # resolve.EXPORT_AAF_NEW - # resolve.EXPORT_AAF_EXISTING - # resolve.EXPORT_CDL - # resolve.EXPORT_SDL - # resolve.EXPORT_MISSING_CLIPS - # Please note that exportSubType is a required parameter for resolve.EXPORT_AAF and resolve.EXPORT_EDL. For rest of the exportType, exportSubtype is ignored. - # When exportType is resolve.EXPORT_AAF, valid exportSubtype values are resolve.EXPORT_AAF_NEW and resolve.EXPORT_AAF_EXISTING.
- # When exportType is resolve.EXPORT_EDL, valid exportSubtype values are resolve.EXPORT_CDL, resolve.EXPORT_SDL, resolve.EXPORT_MISSING_CLIPS and resolve.EXPORT_NONE. - # Note: Replace 'resolve.' when using the constants above, if a different Resolve class instance name is used. + # Refer to section "Looking up timeline export properties" for information on the parameters. GetSetting(settingName) --> string # Returns value of timeline setting (indicated by settingName : string). Check the section below for more information. SetSetting(settingName, settingValue) --> Bool # Sets timeline setting (indicated by settingName : string) to the value (settingValue : string). Check the section below for more information. + InsertGeneratorIntoTimeline(generatorName) --> TimelineItem # Inserts a generator (indicated by generatorName : string) into the timeline. + InsertFusionGeneratorIntoTimeline(generatorName) --> TimelineItem # Inserts a Fusion generator (indicated by generatorName : string) into the timeline. + InsertFusionCompositionIntoTimeline() --> TimelineItem # Inserts a Fusion composition into the timeline. + InsertOFXGeneratorIntoTimeline(generatorName) --> TimelineItem # Inserts an OFX generator (indicated by generatorName : string) into the timeline. + InsertTitleIntoTimeline(titleName) --> TimelineItem # Inserts a title (indicated by titleName : string) into the timeline. + InsertFusionTitleIntoTimeline(titleName) --> TimelineItem # Inserts a Fusion title (indicated by titleName : string) into the timeline. + GrabStill() --> galleryStill # Grabs still from the current video clip. Returns a GalleryStill object. + GrabAllStills(stillFrameSource) --> [galleryStill] # Grabs stills from all the clips of the timeline at 'stillFrameSource' (1 - First frame, 2 - Middle frame). Returns the list of GalleryStill objects. + GetUniqueId() --> string # Returns a unique ID for the timeline TimelineItem GetName() --> string # Returns the item name. @@ -323,6 +325,10 @@ TimelineItem GetLeftOffset() --> int # Returns the maximum extension by frame for clip from left side. GetRightOffset() --> int # Returns the maximum extension by frame for clip from right side. GetStart() --> int # Returns the start frame position on the timeline. + SetProperty(propertyKey, propertyValue) --> Bool # Sets the value of property "propertyKey" to value "propertyValue". + # Refer to "Looking up Timeline item properties" for more information + GetProperty(propertyKey) --> int/[key:value] # Returns the value of the specified key + # If no key is specified, the method returns a dictionary (Python) or table (Lua) for all supported keys AddMarker(frameId, color, name, note, duration, --> Bool # Creates a new marker at given frameId position and with given marker information. 'customData' is optional and helps to attach user specific data to the marker. customData) GetMarkers() --> {markers...} # Returns a dict (frameId -> {information}) of all markers and dicts with their information. @@ -345,7 +351,8 @@ TimelineItem DeleteFusionCompByName(compName) --> Bool # Deletes the named Fusion composition. LoadFusionCompByName(compName) --> fusionComp # Loads the named Fusion composition as the active composition. RenameFusionCompByName(oldName, newName) --> Bool # Renames the Fusion composition identified by oldName. - AddVersion(versionName, versionType) --> Bool # Adds a new color version for a video clipbased on versionType (0 - local, 1 - remote).
+ AddVersion(versionName, versionType) --> Bool # Adds a new color version for a video clip based on versionType (0 - local, 1 - remote). + GetCurrentVersion() --> {versionName...} # Returns the current version of the video clip. The returned value will have the keys versionName and versionType(0 - local, 1 - remote). DeleteVersionByName(versionName, versionType) --> Bool # Deletes a color version by name and versionType (0 - local, 1 - remote). LoadVersionByName(versionName, versionType) --> Bool # Loads a named color version as the active version. versionType: 0 - local, 1 - remote. RenameVersionByName(oldName, newName, versionType)--> Bool # Renames the color version identified by oldName and versionType (0 - local, 1 - remote). @@ -354,12 +361,14 @@ TimelineItem GetStereoConvergenceValues() --> {keyframes...} # Returns a dict (offset -> value) of keyframe offsets and respective convergence values. GetStereoLeftFloatingWindowParams() --> {keyframes...} # For the LEFT eye -> returns a dict (offset -> dict) of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values. GetStereoRightFloatingWindowParams() --> {keyframes...} # For the RIGHT eye -> returns a dict (offset -> dict) of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values. + GetNumNodes() --> int # Returns the number of nodes in the current graph for the timeline item SetLUT(nodeIndex, lutPath) --> Bool # Sets LUT on the node mapping the node index provided, 1 <= nodeIndex <= total number of nodes. # The lutPath can be an absolute path, or a relative path (based off custom LUT paths or the master LUT path). # The operation is successful for valid lut paths that Resolve has already discovered (see Project.RefreshLUTList). + GetLUT(nodeIndex) --> String # Gets relative LUT path based on the node index provided, 1 <= nodeIndex <= total number of nodes. SetCDL([CDL map]) --> Bool # Keys of map are: "NodeIndex", "Slope", "Offset", "Power", "Saturation", where 1 <= NodeIndex <= total number of nodes. # Example python code - SetCDL({"NodeIndex" : "1", "Slope" : "0.5 0.4 0.2", "Offset" : "0.4 0.3 0.2", "Power" : "0.6 0.7 0.8", "Saturation" : "0.65"}) - AddTake(mediaPoolItem, startFrame=0, endFrame)=0 --> Bool # Adds mediaPoolItem as a new take. Initializes a take selector for the timeline item if needed. By default, the whole clip is added. startFrame and endFrame can be specified as extents. + AddTake(mediaPoolItem, startFrame, endFrame) --> Bool # Adds mediaPoolItem as a new take. Initializes a take selector for the timeline item if needed. By default, the full clip extents is added. startFrame (int) and endFrame (int) are optional arguments used to specify the extents. GetSelectedTakeIndex() --> int # Returns the index of the currently selected take, or 0 if the clip is not a take selector. GetTakesCount() --> int # Returns the number of takes in take selector, or 0 if the clip is not a take selector. GetTakeByIndex(idx) --> {takeInfo...} # Returns a dict (keys "startFrame", "endFrame" and "mediaPoolItem") with take info for specified index. @@ -367,7 +376,24 @@ TimelineItem SelectTakeByIndex(idx) --> Bool # Selects a take by index, 1 <= idx <= number of takes. FinalizeTake() --> Bool # Finalizes take selection. CopyGrades([tgtTimelineItems]) --> Bool # Copies the current grade to all the items in tgtTimelineItems list. 
Returns True on success and False if any error occurred. + UpdateSidecar() --> Bool # Updates sidecar file for BRAW clips or RMD file for R3D clips. + GetUniqueId() --> string # Returns a unique ID for the timeline item +Gallery + GetAlbumName(galleryStillAlbum) --> string # Returns the name of the GalleryStillAlbum object 'galleryStillAlbum'. + SetAlbumName(galleryStillAlbum, albumName) --> Bool # Sets the name of the GalleryStillAlbum object 'galleryStillAlbum' to 'albumName'. + GetCurrentStillAlbum() --> galleryStillAlbum # Returns current album as a GalleryStillAlbum object. + SetCurrentStillAlbum(galleryStillAlbum) --> Bool # Sets current album to GalleryStillAlbum object 'galleryStillAlbum'. + GetGalleryStillAlbums() --> [galleryStillAlbum] # Returns the gallery albums as a list of GalleryStillAlbum objects. + +GalleryStillAlbum + GetStills() --> [galleryStill] # Returns the list of GalleryStill objects in the album. + GetLabel(galleryStill) --> string # Returns the label of the galleryStill. + SetLabel(galleryStill, label) --> Bool # Sets the new 'label' to GalleryStill object 'galleryStill'. + ExportStills([galleryStill], folderPath, filePrefix, format) --> Bool # Exports list of GalleryStill objects '[galleryStill]' to directory 'folderPath', with filename prefix 'filePrefix', using file format 'format' (supported formats: dpx, cin, tif, jpg, png, ppm, bmp, xpm). + DeleteStills([galleryStill]) --> Bool # Deletes specified list of GalleryStill objects '[galleryStill]'. + +GalleryStill # This class does not provide any API functions but the object type is used by functions in other classes. List and Dict Data Structures ----------------------------- @@ -375,7 +401,6 @@ Beside primitive data types, Resolve's Python API mainly uses list and dict data As Lua does not support list and dict data structures, the Lua API implements "list" as a table with indices, e.g. { [1] = listValue1, [2] = listValue2, ... }. Similarly the Lua API implements "dict" as a table with the dictionary key as first element, e.g. { [dictKey1] = dictValue1, [dictKey2] = dictValue2, ... }. - Looking up Project and Clip properties -------------------------------------- This section covers additional notes for the functions "Project:GetSetting", "Project:SetSetting", "Timeline:GetSetting", "Timeline:SetSetting", "MediaPoolItem:GetClipProperty" and @@ -412,6 +437,179 @@ Affects: • x = MediaPoolItem:GetClipProperty('Super Scale') and MediaPoolItem:SetClipProperty('Super Scale', x) +Looking up Render Settings +-------------------------- +This section covers the supported settings for the method SetRenderSettings({settings}). + +The settings parameter is a dictionary containing the following keys: + - "SelectAllFrames": Bool (when set True, the settings MarkIn and MarkOut are ignored) + - "MarkIn": int + - "MarkOut": int + - "TargetDir": string + - "CustomName": string + - "UniqueFilenameStyle": 0 - Prefix, 1 - Suffix.
+ - "ExportVideo": Bool + - "ExportAudio": Bool + - "FormatWidth": int + - "FormatHeight": int + - "FrameRate": float (examples: 23.976, 24) + - "PixelAspectRatio": string (for SD resolution: "16_9" or "4_3") (other resolutions: "square" or "cinemascope") + - "VideoQuality" possible values for current codec (if applicable): + - 0 (int) - will set quality to automatic + - [1 -> MAX] (int) - will set input bit rate + - ["Least", "Low", "Medium", "High", "Best"] (String) - will set input quality level + - "AudioCodec": string (example: "aac") + - "AudioBitDepth": int + - "AudioSampleRate": int + - "ColorSpaceTag" : string (example: "Same as Project", "AstroDesign") + - "GammaTag" : string (example: "Same as Project", "ACEScct") + - "ExportAlpha": Bool + - "EncodingProfile": string (example: "Main10"). Can only be set for H.264 and H.265. + - "MultiPassEncode": Bool. Can only be set for H.264. + - "AlphaMode": 0 - Premultiplied, 1 - Straight. Can only be set if "ExportAlpha" is true. + - "NetworkOptimization": Bool. Only supported by QuickTime and MP4 formats. + +Looking up timeline export properties +------------------------------------- +This section covers the parameters for the argument Export(fileName, exportType, exportSubtype). + +exportType can be one of the following constants: + - resolve.EXPORT_AAF + - resolve.EXPORT_DRT + - resolve.EXPORT_EDL + - resolve.EXPORT_FCP_7_XML + - resolve.EXPORT_FCPXML_1_3 + - resolve.EXPORT_FCPXML_1_4 + - resolve.EXPORT_FCPXML_1_5 + - resolve.EXPORT_FCPXML_1_6 + - resolve.EXPORT_FCPXML_1_7 + - resolve.EXPORT_FCPXML_1_8 + - resolve.EXPORT_FCPXML_1_9 + - resolve.EXPORT_FCPXML_1_10 + - resolve.EXPORT_HDR_10_PROFILE_A + - resolve.EXPORT_HDR_10_PROFILE_B + - resolve.EXPORT_TEXT_CSV + - resolve.EXPORT_TEXT_TAB + - resolve.EXPORT_DOLBY_VISION_VER_2_9 + - resolve.EXPORT_DOLBY_VISION_VER_4_0 +exportSubtype can be one of the following enums: + - resolve.EXPORT_NONE + - resolve.EXPORT_AAF_NEW + - resolve.EXPORT_AAF_EXISTING + - resolve.EXPORT_CDL + - resolve.EXPORT_SDL + - resolve.EXPORT_MISSING_CLIPS +Please note that exportSubType is a required parameter for resolve.EXPORT_AAF and resolve.EXPORT_EDL. For rest of the exportType, exportSubtype is ignored. +When exportType is resolve.EXPORT_AAF, valid exportSubtype values are resolve.EXPORT_AAF_NEW and resolve.EXPORT_AAF_EXISTING. +When exportType is resolve.EXPORT_EDL, valid exportSubtype values are resolve.EXPORT_CDL, resolve.EXPORT_SDL, resolve.EXPORT_MISSING_CLIPS and resolve.EXPORT_NONE. +Note: Replace 'resolve.' when using the constants above, if a different Resolve class instance name is used. + +Looking up Timeline item properties +----------------------------------- +This section covers additional notes for the function "TimelineItem:SetProperty" and "TimelineItem:GetProperty". These functions are used to get and set properties mentioned. 
+ +The supported keys with their accepted values are: + "Pan" : floating point values from -4.0*width to 4.0*width + "Tilt" : floating point values from -4.0*height to 4.0*height + "ZoomX" : floating point values from 0.0 to 100.0 + "ZoomY" : floating point values from 0.0 to 100.0 + "ZoomGang" : a boolean value + "RotationAngle" : floating point values from -360.0 to 360.0 + "AnchorPointX" : floating point values from -4.0*width to 4.0*width + "AnchorPointY" : floating point values from -4.0*height to 4.0*height + "Pitch" : floating point values from -1.5 to 1.5 + "Yaw" : floating point values from -1.5 to 1.5 + "FlipX" : boolean value for flipping horizontally + "FlipY" : boolean value for flipping vertically + "CropLeft" : floating point values from 0.0 to width + "CropRight" : floating point values from 0.0 to width + "CropTop" : floating point values from 0.0 to height + "CropBottom" : floating point values from 0.0 to height + "CropSoftness" : floating point values from -100.0 to 100.0 + "CropRetain" : boolean value for "Retain Image Position" checkbox + "DynamicZoomEase" : A value from the following constants + - DYNAMIC_ZOOM_EASE_LINEAR = 0 + - DYNAMIC_ZOOM_EASE_IN + - DYNAMIC_ZOOM_EASE_OUT + - DYNAMIC_ZOOM_EASE_IN_AND_OUT + "CompositeMode" : A value from the following constants + - COMPOSITE_NORMAL = 0 + - COMPOSITE_ADD + - COMPOSITE_SUBTRACT + - COMPOSITE_DIFF + - COMPOSITE_MULTIPLY + - COMPOSITE_SCREEN + - COMPOSITE_OVERLAY + - COMPOSITE_HARDLIGHT + - COMPOSITE_SOFTLIGHT + - COMPOSITE_DARKEN + - COMPOSITE_LIGHTEN + - COMPOSITE_COLOR_DODGE + - COMPOSITE_COLOR_BURN + - COMPOSITE_EXCLUSION + - COMPOSITE_HUE + - COMPOSITE_SATURATE + - COMPOSITE_COLORIZE + - COMPOSITE_LUMA_MASK + - COMPOSITE_DIVIDE + - COMPOSITE_LINEAR_DODGE + - COMPOSITE_LINEAR_BURN + - COMPOSITE_LINEAR_LIGHT + - COMPOSITE_VIVID_LIGHT + - COMPOSITE_PIN_LIGHT + - COMPOSITE_HARD_MIX + - COMPOSITE_LIGHTER_COLOR + - COMPOSITE_DARKER_COLOR + - COMPOSITE_FOREGROUND + - COMPOSITE_ALPHA + - COMPOSITE_INVERTED_ALPHA + - COMPOSITE_LUM + - COMPOSITE_INVERTED_LUM + "Opacity" : floating point value from 0.0 to 100.0 + "Distortion" : floating point value from -1.0 to 1.0 + "RetimeProcess" : A value from the following constants + - RETIME_USE_PROJECT = 0 + - RETIME_NEAREST + - RETIME_FRAME_BLEND + - RETIME_OPTICAL_FLOW + "MotionEstimation" : A value from the following constants + - MOTION_EST_USE_PROJECT = 0 + - MOTION_EST_STANDARD_FASTER + - MOTION_EST_STANDARD_BETTER + - MOTION_EST_ENHANCED_FASTER + - MOTION_EST_ENHANCED_BETTER + - MOTION_EST_SPEED_WRAP + "Scaling" : A value from the following constants + - SCALE_USE_PROJECT = 0 + - SCALE_CROP + - SCALE_FIT + - SCALE_FILL + - SCALE_STRETCH + "ResizeFilter" : A value from the following constants + - RESIZE_FILTER_USE_PROJECT = 0 + - RESIZE_FILTER_SHARPER + - RESIZE_FILTER_SMOOTHER + - RESIZE_FILTER_BICUBIC + - RESIZE_FILTER_BILINEAR + - RESIZE_FILTER_BESSEL + - RESIZE_FILTER_BOX + - RESIZE_FILTER_CATMULL_ROM + - RESIZE_FILTER_CUBIC + - RESIZE_FILTER_GAUSSIAN + - RESIZE_FILTER_LANCZOS + - RESIZE_FILTER_MITCHELL + - RESIZE_FILTER_NEAREST_NEIGHBOR + - RESIZE_FILTER_QUADRATIC + - RESIZE_FILTER_SINC + - RESIZE_FILTER_LINEAR +Values beyond the range will be clipped. +width and height are the same as the UI max limits. + +The arguments can be passed as a key and value pair or they can be grouped together into a dictionary (for Python) or table (for Lua) and passed +as a single argument.
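A short Python sketch of both calling styles just described (assumes a `timeline` obtained as in the earlier sketches, with at least one clip on video track 1):

    item = timeline.GetItemListInTrack("video", 1)[0]
    item.SetProperty("ZoomX", 2.0)                 # single key/value pair
    item.SetProperty({"Pan": 10.0, "Tilt": -5.0})  # dict form, several keys in one call
    print(item.GetProperty("ZoomX"))               # 2.0
    print(item.GetProperty())                      # dict of all supported keys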
+ +Getting the value of a key that uses constants will return the number corresponding to that constant. + Deprecated Resolve API Functions -------------------------------- The following API functions are deprecated. @@ -450,12 +648,12 @@ TimelineItem Unsupported Resolve API Functions --------------------------------- -The following API (functions and paraameters) are no longer supported. +The following API (functions and parameters) are no longer supported. Use job IDs instead of indices. Project StartRendering(index1, index2, ...) --> Bool # Please use unique job ids (string) instead of indices. StartRendering([idxs...]) --> Bool # Please use unique job ids (string) instead of indices. DeleteRenderJobByIndex(idx) --> Bool # Please use unique job ids (string) instead of indices. GetRenderJobStatus(idx) --> {status info} # Please use unique job ids (string) instead of indices. - GetSetting and SetSetting --> {} # settingName "videoMonitorUseRec601For422SDI" is no longer supported. - # Please use "videoMonitorUseMatrixOverrideFor422SDI" and "videoMonitorMatrixOverrideFor422SDI" instead. + GetSetting and SetSetting --> {} # settingName videoMonitorUseRec601For422SDI is now replaced with videoMonitorUseMatrixOverrideFor422SDI and videoMonitorMatrixOverrideFor422SDI. + # settingName perfProxyMediaOn is now replaced with perfProxyMediaMode which takes values 0 - disabled, 1 - when available, 2 - when source not available. diff --git a/openpype/hosts/resolve/__init__.py b/openpype/hosts/resolve/__init__.py index e69de29bb2..b4a994bbaa 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -0,0 +1,6 @@ +from .addon import ResolveAddon + + +__all__ = ( + "ResolveAddon", +) diff --git a/openpype/hosts/resolve/addon.py b/openpype/hosts/resolve/addon.py new file mode 100644 index 0000000000..a31da52a6d --- /dev/null +++ b/openpype/hosts/resolve/addon.py @@ -0,0 +1,24 @@ +import os + +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +from .utils import RESOLVE_ROOT_DIR + + +class ResolveAddon(OpenPypeModule, IHostAddon): + name = "resolve" + host_name = "resolve" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(RESOLVE_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".drp"] diff --git a/openpype/hosts/resolve/api/__init__.py b/openpype/hosts/resolve/api/__init__.py index cf1edb4c35..00a598548e 100644 --- a/openpype/hosts/resolve/api/__init__.py +++ b/openpype/hosts/resolve/api/__init__.py @@ -1,10 +1,6 @@ """ resolve api """ - -bmdvr = None -bmdvf = None - from .utils import ( get_resolve_module ) @@ -70,6 +66,9 @@ from .workio import ( from .testing_utils import TestGUI +bmdvr = None +bmdvf = None + __all__ = [ "bmdvr", "bmdvf", diff --git a/openpype/hosts/resolve/api/action.py b/openpype/hosts/resolve/api/action.py index d55a24a39a..ceedc2cc54 100644 --- a/openpype/hosts/resolve/api/action.py +++ b/openpype/hosts/resolve/api/action.py @@ -4,7 +4,7 @@ from __future__ import absolute_import import pyblish.api -from openpype.action import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context class SelectInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/resolve/api/menu.py b/openpype/hosts/resolve/api/menu.py index 2c7678ee5b..86b292105a 100644 --- a/openpype/hosts/resolve/api/menu.py +++
b/openpype/hosts/resolve/api/menu.py @@ -54,15 +54,15 @@ class OpenPypeMenu(QtWidgets.QWidget): ) self.setWindowTitle("OpenPype") - workfiles_btn = QtWidgets.QPushButton("Workfiles...", self) - create_btn = QtWidgets.QPushButton("Create...", self) - publish_btn = QtWidgets.QPushButton("Publish...", self) - load_btn = QtWidgets.QPushButton("Load...", self) - inventory_btn = QtWidgets.QPushButton("Inventory...", self) - subsetm_btn = QtWidgets.QPushButton("Subset Manager...", self) - libload_btn = QtWidgets.QPushButton("Library...", self) + workfiles_btn = QtWidgets.QPushButton("Workfiles ...", self) + create_btn = QtWidgets.QPushButton("Create ...", self) + publish_btn = QtWidgets.QPushButton("Publish ...", self) + load_btn = QtWidgets.QPushButton("Load ...", self) + inventory_btn = QtWidgets.QPushButton("Manager ...", self) + subsetm_btn = QtWidgets.QPushButton("Subset Manager ...", self) + libload_btn = QtWidgets.QPushButton("Library ...", self) experimental_btn = QtWidgets.QPushButton( - "Experimental tools...", self + "Experimental tools ...", self ) # rename_btn = QtWidgets.QPushButton("Rename", self) # set_colorspace_btn = QtWidgets.QPushButton( diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index 1c8d9dc01c..899cb825bb 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -244,7 +244,7 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( instance, old_value, new_value)) - from openpype.hosts.resolve import ( + from openpype.hosts.resolve.api import ( set_publish_attribute ) diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index b03125d502..0ed7beee59 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -4,13 +4,15 @@ import uuid import qargparse from Qt import QtWidgets, QtCore +from openpype.settings import get_current_project_settings +from openpype.pipeline.context_tools import get_current_project_asset from openpype.pipeline import ( LegacyCreator, LoaderPlugin, ) -from openpype.pipeline.context_tools import get_current_project_asset -from openpype.hosts import resolve + from . 
import lib +from .menu import load_stylesheet class CreatorWidget(QtWidgets.QDialog): @@ -86,7 +88,7 @@ class CreatorWidget(QtWidgets.QDialog): ok_btn.clicked.connect(self._on_ok_clicked) cancel_btn.clicked.connect(self._on_cancel_clicked) - stylesheet = resolve.api.menu.load_stylesheet() + stylesheet = load_stylesheet() self.setStyleSheet(stylesheet) def _on_ok_clicked(self): @@ -438,7 +440,7 @@ class ClipLoader: source_in = int(_clip_property("Start")) source_out = int(_clip_property("End")) - resolve.swap_clips( + lib.swap_clips( timeline_item, media_pool_item, source_in, @@ -504,7 +506,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - from openpype.api import get_current_project_settings + resolve_p_settings = get_current_project_settings().get("resolve") self.presets = {} if resolve_p_settings: @@ -512,13 +514,13 @@ class Creator(LegacyCreator): self.__class__.__name__, {}) # adding basic current context resolve objects - self.project = resolve.get_current_project() - self.timeline = resolve.get_current_timeline() + self.project = lib.get_current_project() + self.timeline = lib.get_current_timeline() if (self.options or {}).get("useSelection"): - self.selected = resolve.get_current_timeline_items(filter=True) + self.selected = lib.get_current_timeline_items(filter=True) else: - self.selected = resolve.get_current_timeline_items(filter=False) + self.selected = lib.get_current_timeline_items(filter=False) self.widget = CreatorWidget diff --git a/openpype/hosts/resolve/api/preload_console.py b/openpype/hosts/resolve/api/preload_console.py deleted file mode 100644 index a822ea2460..0000000000 --- a/openpype/hosts/resolve/api/preload_console.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python -import time -from openpype.hosts.resolve.utils import get_resolve_module -from openpype.lib import Logger - -log = Logger.get_logger(__name__) - -wait_delay = 2.5 -wait = 0.00 -ready = None -while True: - try: - # Create project and set parameters: - resolve = get_resolve_module() - pm = resolve.GetProjectManager() - if pm: - ready = None - else: - ready = True - except AttributeError: - pass - - if ready is None: - time.sleep(wait_delay) - log.info(f"Waiting {wait}s for Resolve to have opened Project Manager") - wait += wait_delay - else: - print(f"Preloaded variables: \n\n\tResolve module: " - f"`resolve` > {type(resolve)} \n\tProject manager: " - f"`pm` > {type(pm)}") - break diff --git a/openpype/hosts/resolve/api/workio.py b/openpype/hosts/resolve/api/workio.py index 5a742ecf7e..5ce73eea53 100644 --- a/openpype/hosts/resolve/api/workio.py +++ b/openpype/hosts/resolve/api/workio.py @@ -1,7 +1,7 @@ """Host API required Work Files tool""" import os -from openpype.api import Logger +from openpype.lib import Logger from .lib import ( get_project_manager, get_current_project, diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 1d977e2d8e..8574b3ad01 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,5 +1,5 @@ import os - +import platform from openpype.lib import PreLaunchHook from openpype.hosts.resolve.utils import setup @@ -14,35 +14,91 @@ class ResolvePrelaunch(PreLaunchHook): app_groups = ["resolve"] def execute(self): + current_platform = platform.system().lower() + + PROGRAMDATA = self.launch_context.env.get("PROGRAMDATA", "") + RESOLVE_SCRIPT_API_ = { + "windows": ( + 
f"{PROGRAMDATA}/Blackmagic Design/" + "DaVinci Resolve/Support/Developer/Scripting" + ), + "darwin": ( + "/Library/Application Support/Blackmagic Design" + "/DaVinci Resolve/Developer/Scripting" + ), + "linux": "/opt/resolve/Developer/Scripting" + } + RESOLVE_SCRIPT_API = os.path.normpath( + RESOLVE_SCRIPT_API_[current_platform]) + self.launch_context.env["RESOLVE_SCRIPT_API"] = RESOLVE_SCRIPT_API + + RESOLVE_SCRIPT_LIB_ = { + "windows": ( + "C:/Program Files/Blackmagic Design" + "/DaVinci Resolve/fusionscript.dll" + ), + "darwin": ( + "/Applications/DaVinci Resolve/DaVinci Resolve.app" + "/Contents/Libraries/Fusion/fusionscript.so" + ), + "linux": "/opt/resolve/libs/Fusion/fusionscript.so" + } + RESOLVE_SCRIPT_LIB = os.path.normpath( + RESOLVE_SCRIPT_LIB_[current_platform]) + self.launch_context.env["RESOLVE_SCRIPT_LIB"] = RESOLVE_SCRIPT_LIB + # TODO: add OTIO installation from `openpype/requirements.py` - # making sure python 3.6 is installed at provided path - py36_dir = os.path.normpath( - self.launch_context.env.get("PYTHON36_RESOLVE", "")) - assert os.path.isdir(py36_dir), ( - "Python 3.6 is not installed at the provided folder path. Either " + # make sure a Python 3 (<3.9) install is available at the provided path + python3_home = os.path.normpath( + self.launch_context.env.get("RESOLVE_PYTHON3_HOME", "")) + + assert os.path.isdir(python3_home), ( + "Python 3 is not installed at the provided folder path. Either " "make sure the `environments/resolve.json` has correctly " - "set `PYTHON36_RESOLVE` or make sure Python 3.6 is installed " - f"in given path. \nPYTHON36_RESOLVE: `{py36_dir}`" + "set `RESOLVE_PYTHON3_HOME` or make sure Python 3 is installed " + f"in the given path. \nRESOLVE_PYTHON3_HOME: `{python3_home}`" ) - self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...") + self.launch_context.env["PYTHONHOME"] = python3_home + self.log.info(f"Path to Resolve Python folder: `{python3_home}`...") + # add python home and its Scripts folder to PATH + env_path = self.launch_context.env["PATH"] + self.launch_context.env["PATH"] = os.pathsep.join([ + python3_home, + os.path.join(python3_home, "Scripts") + ] + env_path.split(os.pathsep)) + + self.log.debug(f"PATH: {self.launch_context.env['PATH']}") + + # add to the PYTHONPATH + env_pythonpath = self.launch_context.env["PYTHONPATH"] + self.launch_context.env["PYTHONPATH"] = os.pathsep.join([ + os.path.join(python3_home, "Lib", "site-packages"), + os.path.join(RESOLVE_SCRIPT_API, "Modules"), + ] + env_pythonpath.split(os.pathsep)) + + self.log.debug(f"PYTHONPATH: {self.launch_context.env['PYTHONPATH']}") + + RESOLVE_UTILITY_SCRIPTS_DIR_ = { + "windows": ( + f"{PROGRAMDATA}/Blackmagic Design" + "/DaVinci Resolve/Fusion/Scripts/Comp" + ), + "darwin": ( + "/Library/Application Support/Blackmagic Design" + "/DaVinci Resolve/Fusion/Scripts/Comp" + ), + "linux": "/opt/resolve/Fusion/Scripts/Comp" + } + RESOLVE_UTILITY_SCRIPTS_DIR = os.path.normpath( + RESOLVE_UTILITY_SCRIPTS_DIR_[current_platform] + ) # setting utility scripts dir for scripts syncing - us_dir = os.path.normpath( - self.launch_context.env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") - ) - assert os.path.isdir(us_dir), ( - "Resolve utility script dir does not exists. Either make sure " - "the `environments\resolve.json` is having correctly set " - "`RESOLVE_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve.
\n" - f"RESOLVE_UTILITY_SCRIPTS_DIR: `{us_dir}`" - ) - self.log.debug(f"-- us_dir: `{us_dir}`") + self.launch_context.env["RESOLVE_UTILITY_SCRIPTS_DIR"] = ( + RESOLVE_UTILITY_SCRIPTS_DIR) - # correctly format path for pre python script - pre_py_sc = os.path.normpath( - self.launch_context.env.get("PRE_PYTHON_SCRIPT", "")) - self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc - self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...") + # remove terminal coloring tags + self.launch_context.env["OPENPYPE_LOG_NO_COLORS"] = "True" # Resolve Setup integration setup(self.launch_context.env) diff --git a/openpype/hosts/resolve/plugins/publish/extract_workfile.py b/openpype/hosts/resolve/plugins/publish/extract_workfile.py index ea8f19cd8c..535f879b58 100644 --- a/openpype/hosts/resolve/plugins/publish/extract_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/extract_workfile.py @@ -1,10 +1,11 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.resolve.api.lib import get_project_manager -class ExtractWorkfile(openpype.api.Extractor): +class ExtractWorkfile(publish.Extractor): """ Extractor export DRP workfile file representation """ diff --git a/openpype/hosts/resolve/utils.py b/openpype/hosts/resolve/utils.py index 382a7cf344..5881f153ae 100644 --- a/openpype/hosts/resolve/utils.py +++ b/openpype/hosts/resolve/utils.py @@ -9,7 +9,8 @@ def setup(env): log = Logger.get_logger("ResolveSetup") scripts = {} us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR") - us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") + us_dir = env["RESOLVE_UTILITY_SCRIPTS_DIR"] + us_paths = [os.path.join( RESOLVE_ROOT_DIR, "utility_scripts" @@ -17,7 +18,7 @@ def setup(env): # collect script dirs if us_env: - log.info(f"Utility Scripts Env: `{us_env}`") + log.info("Utility Scripts Env: `{}`".format(us_env)) us_paths = us_env.split( os.pathsep) + us_paths @@ -25,13 +26,13 @@ def setup(env): for path in us_paths: scripts.update({path: os.listdir(path)}) - log.info(f"Utility Scripts Dir: `{us_paths}`") - log.info(f"Utility Scripts: `{scripts}`") + log.info("Utility Scripts Dir: `{}`".format(us_paths)) + log.info("Utility Scripts: `{}`".format(scripts)) # make sure no script file is in folder for s in os.listdir(us_dir): path = os.path.join(us_dir, s) - log.info(f"Removing `{path}`...") + log.info("Removing `{}`...".format(path)) if os.path.isdir(path): shutil.rmtree(path, onerror=None) else: @@ -44,7 +45,7 @@ def setup(env): # script in script list src = os.path.join(d, s) dst = os.path.join(us_dir, s) - log.info(f"Copying `{src}` to `{dst}`...") + log.info("Copying `{}` to `{}`...".format(src, dst)) if os.path.isdir(src): shutil.copytree( src, dst, symlinks=False, diff --git a/openpype/hosts/standalonepublisher/__init__.py b/openpype/hosts/standalonepublisher/__init__.py index 394d5be397..f47fa6b573 100644 --- a/openpype/hosts/standalonepublisher/__init__.py +++ b/openpype/hosts/standalonepublisher/__init__.py @@ -1,6 +1,6 @@ -from .standalonepublish_module import StandAlonePublishModule +from .addon import StandAlonePublishAddon __all__ = ( - "StandAlonePublishModule", + "StandAlonePublishAddon", ) diff --git a/openpype/hosts/standalonepublisher/standalonepublish_module.py b/openpype/hosts/standalonepublisher/addon.py similarity index 82% rename from openpype/hosts/standalonepublisher/standalonepublish_module.py rename to openpype/hosts/standalonepublisher/addon.py index bf8e1d2c23..98ec44d4e2 100644 --- 
a/openpype/hosts/standalonepublisher/standalonepublish_module.py +++ b/openpype/hosts/standalonepublisher/addon.py @@ -5,18 +5,18 @@ import click from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ITrayAction, IHostModule +from openpype.modules.interfaces import ITrayAction, IHostAddon STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostModule): +class StandAlonePublishAddon(OpenPypeModule, ITrayAction, IHostAddon): label = "Publish" - name = "standalonepublish_tool" + name = "standalonepublisher" host_name = "standalonepublisher" def initialize(self, modules_settings): - self.enabled = modules_settings[self.name]["enabled"] + self.enabled = modules_settings["standalonepublish_tool"]["enabled"] self.publish_paths = [ os.path.join(STANDALONEPUBLISH_ROOT_DIR, "plugins", "publish") ] @@ -42,7 +42,7 @@ class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostModule): @click.group( - StandAlonePublishModule.name, + StandAlonePublishAddon.name, help="StandalonePublisher related commands.") def cli_main(): pass diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index 052a97af7d..7925b0ecf3 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -2,8 +2,8 @@ import copy import json import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc from openpype.client import get_asset_by_name +from openpype.pipeline.create import get_subset_name class CollectBulkMovInstances(pyblish.api.InstancePlugin): @@ -44,12 +44,14 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): task_name = available_task_names[_task_name_low] break - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.new_instance_family, self.subset_name_variant, task_name, asset_doc, - project_name + project_name, + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) instance_name = f"{asset_name}_{subset_name}" diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py index afb828474d..3d2b6d04ad 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py @@ -1,6 +1,8 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateEditorialResources(pyblish.api.InstancePlugin): @@ -13,7 +15,7 @@ class ValidateEditorialResources(pyblish.api.InstancePlugin): # make sure it is enabled only if at least both families are available match = pyblish.api.Subset - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): self.log.debug( diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py index ff7f60354e..074c62ea0e 100644 --- 
a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py @@ -2,9 +2,11 @@ import re import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -13,7 +15,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): label = "Validate Frame Range" hosts = ["standalonepublisher"] families = ["render"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder optional = True # published data might be sequence (.mov, .mp4) in that counting files diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py index fe655f6b74..df04ae3b66 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py @@ -1,14 +1,17 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) + class ValidateShotDuplicates(pyblish.api.ContextPlugin): """Validating no duplicate names are in context.""" label = "Validate Shot Duplicates" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, context): shot_names = [] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py index ef8da9f280..c123bef4f8 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py @@ -1,16 +1,19 @@ # -*- coding: utf-8 -*- """Validator for correct file naming.""" -import pyblish.api -import openpype.api import re -from openpype.pipeline import PublishXmlValidationError +import pyblish.api + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateSimpleUnrealTextureNaming(pyblish.api.InstancePlugin): label = "Validate Unreal Texture Names" hosts = ["standalonepublisher"] families = ["simpleUnrealTexture"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder regex = "^T_{asset}.*" def process(self, instance): diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py index 316f58988f..1782f53de2 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py @@ -2,8 +2,10 @@ import os import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateSources(pyblish.api.InstancePlugin): @@ -13,7 +15,7 @@ class ValidateSources(pyblish.api.InstancePlugin): got deleted between starting of SP and now. 
""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Check source files" optional = True # only for unforeseeable cases diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py index d66fb257bb..44f69e48f7 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatch(pyblish.api.InstancePlugin): @@ -9,7 +11,7 @@ class ValidateTextureBatch(pyblish.api.InstancePlugin): label = "Validate Texture Presence" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["texture_batch_workfile"] optional = False diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py index 0e67464f59..f489d37f59 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureHasWorkfile(pyblish.api.InstancePlugin): @@ -12,7 +14,7 @@ class ValidateTextureHasWorkfile(pyblish.api.InstancePlugin): """ label = "Validate Texture Has Workfile" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["textures"] optional = True diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py index 751ad917ca..22f4a0eafc 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py @@ -1,14 +1,16 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatchNaming(pyblish.api.InstancePlugin): """Validates that all instances had properly formatted name.""" label = "Validate Texture Batch Naming" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["texture_batch_workfile", "textures"] optional = False diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py index 84d9def895..dab160d537 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class 
ValidateTextureBatchVersions(pyblish.api.InstancePlugin): @@ -14,7 +16,7 @@ class ValidateTextureBatchVersions(pyblish.api.InstancePlugin): """ label = "Validate Texture Batch Versions" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["textures"] optional = False diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index fa492a80d8..56ea82f6b6 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): @@ -12,7 +14,7 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): label = "Validate Texture Workfile Has Resources" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["texture_batch_workfile"] optional = True diff --git a/openpype/hosts/testhost/README.md b/openpype/hosts/testhost/README.md deleted file mode 100644 index f69e02a3b3..0000000000 --- a/openpype/hosts/testhost/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# What is `testhost` -Host `testhost` was created to fake running host for testing of publisher. - -Does not have any proper launch mechanism at the moment. There is python script `./run_publish.py` which will show publisher window. The script requires to set few variables to run. Execution will register host `testhost`, register global publish plugins and register creator and publish plugins from `./plugins`. - -## Data -Created instances and context data are stored into json files inside `./api` folder. Can be easily modified to save them to a different place. - -## Plugins -Test host has few plugins to be able test publishing. - -### Creators -They are just example plugins using functions from `api` to create/remove/update data. One of them is auto creator which means that is triggered on each reset of create context. Others are manual creators both creating the same family. - -### Publishers -Collectors are example plugin to use `get_attribute_defs` to define attributes for specific families or for context. Validators are to test `PublishValidationError`. 
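The standalonepublisher validator diffs above all apply the same mechanical migration: imports from `openpype.api` and `openpype.pipeline` are replaced by `openpype.pipeline.publish`. For orientation, a minimal sketch of a validator written directly against the new layout; the plugin name and family here are hypothetical, and only the imports, the `order` assignment, and the error type mirror the pattern this PR applies:

```python
import pyblish.api

from openpype.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,
)


class ValidateExampleContents(pyblish.api.InstancePlugin):
    """Hypothetical validator using the new `openpype.pipeline.publish` imports."""

    label = "Validate Example Contents"
    hosts = ["standalonepublisher"]
    families = ["example"]
    order = ValidateContentsOrder

    def process(self, instance):
        # Fail through the XML-backed error type so the publisher UI can
        # render a formatted description (argument layout assumed here;
        # see the validators above for real call sites).
        if not instance.data.get("representations"):
            raise PublishXmlValidationError(
                self, "Instance has no representations."
            )
```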
diff --git a/openpype/hosts/testhost/api/__init__.py b/openpype/hosts/testhost/api/__init__.py deleted file mode 100644 index a929a891aa..0000000000 --- a/openpype/hosts/testhost/api/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -import logging -import pyblish.api - -from openpype.pipeline import register_creator_plugin_path - -from .pipeline import ( - ls, - list_instances, - update_instances, - remove_instances, - get_context_data, - update_context_data, - get_context_title -) - - -HOST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") - -log = logging.getLogger(__name__) - - -def install(): - log.info("OpenPype - Installing TestHost integration") - pyblish.api.register_host("testhost") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_creator_plugin_path(CREATE_PATH) - - -__all__ = ( - "ls", - "list_instances", - "update_instances", - "remove_instances", - "get_context_data", - "update_context_data", - "get_context_title", - - "install" -) diff --git a/openpype/hosts/testhost/api/context.json b/openpype/hosts/testhost/api/context.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/openpype/hosts/testhost/api/context.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/openpype/hosts/testhost/api/instances.json b/openpype/hosts/testhost/api/instances.json deleted file mode 100644 index d955012514..0000000000 --- a/openpype/hosts/testhost/api/instances.json +++ /dev/null @@ -1,108 +0,0 @@ -[ - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMyVariant", - "version": 1, - "asset": "sq01_sh0010", - "task": "Compositing", - "variant": "myVariant", - "instance_id": "a485f148-9121-46a5-8157-aa64df0fb449", - "creator_attributes": { - "number_key": 10, - "ha": 10 - }, - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": false - } - }, - "creator_identifier": "test_one" - }, - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMyVariant2", - "version": 1, - "asset": "sq01_sh0010", - "task": "Compositing", - "variant": "myVariant2", - "creator_attributes": {}, - "instance_id": "a485f148-9121-46a5-8157-aa64df0fb444", - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - }, - "creator_identifier": "test_one" - }, - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMain", - "version": 1, - "asset": "sq01_sh0010", - "task": "Compositing", - "variant": "Main", - "creator_attributes": {}, - "instance_id": "3607bc95-75f6-4648-a58d-e699f413d09f", - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - }, - "creator_identifier": "test_two" - }, - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMain2", - "version": 1, - "asset": "sq01_sh0020", - "task": "Compositing", - "variant": "Main2", - "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8eb", - "creator_attributes": {}, - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - }, - "creator_identifier": "test_two" - }, - { - "id": "pyblish.avalon.instance", - "family": "test_three", - "subset": "test_threeMain2", - "active": true, - "version": 1, - "asset": "sq01_sh0020", - "task": "Compositing", - "variant": "Main2", - "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8ec", - 
"creator_attributes": {}, - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - } - }, - { - "id": "pyblish.avalon.instance", - "family": "workfile", - "subset": "workfileMain", - "active": true, - "creator_identifier": "workfile", - "version": 1, - "asset": "Alpaca_01", - "task": "modeling", - "variant": "Main", - "instance_id": "7c9ddfc7-9f9c-4c1c-b233-38c966735fb6", - "creator_attributes": {}, - "publish_attributes": {} - } -] \ No newline at end of file diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py deleted file mode 100644 index 1e05f336fb..0000000000 --- a/openpype/hosts/testhost/api/pipeline.py +++ /dev/null @@ -1,155 +0,0 @@ -import os -import json -from openpype.client import get_asset_by_name - - -class HostContext: - instances_json_path = None - context_json_path = None - - @classmethod - def get_context_title(cls): - project_name = os.environ.get("AVALON_PROJECT") - if not project_name: - return "TestHost" - - asset_name = os.environ.get("AVALON_ASSET") - if not asset_name: - return project_name - - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.parents"] - ) - - parents = asset_doc.get("data", {}).get("parents") or [] - - hierarchy = [project_name] - hierarchy.extend(parents) - hierarchy.append("{}".format(asset_name)) - task_name = os.environ.get("AVALON_TASK") - if task_name: - hierarchy.append(task_name) - - return "/".join(hierarchy) - - @classmethod - def get_current_dir_filepath(cls, filename): - return os.path.join( - os.path.dirname(os.path.abspath(__file__)), - filename - ) - - @classmethod - def get_instances_json_path(cls): - if cls.instances_json_path is None: - cls.instances_json_path = cls.get_current_dir_filepath( - "instances.json" - ) - return cls.instances_json_path - - @classmethod - def get_context_json_path(cls): - if cls.context_json_path is None: - cls.context_json_path = cls.get_current_dir_filepath( - "context.json" - ) - return cls.context_json_path - - @classmethod - def add_instance(cls, instance): - instances = cls.get_instances() - instances.append(instance) - cls.save_instances(instances) - - @classmethod - def save_instances(cls, instances): - json_path = cls.get_instances_json_path() - with open(json_path, "w") as json_stream: - json.dump(instances, json_stream, indent=4) - - @classmethod - def get_instances(cls): - json_path = cls.get_instances_json_path() - if not os.path.exists(json_path): - instances = [] - with open(json_path, "w") as json_stream: - json.dump(json_stream, instances) - else: - with open(json_path, "r") as json_stream: - instances = json.load(json_stream) - return instances - - @classmethod - def get_context_data(cls): - json_path = cls.get_context_json_path() - if not os.path.exists(json_path): - data = {} - with open(json_path, "w") as json_stream: - json.dump(data, json_stream) - else: - with open(json_path, "r") as json_stream: - data = json.load(json_stream) - return data - - @classmethod - def save_context_data(cls, data): - json_path = cls.get_context_json_path() - with open(json_path, "w") as json_stream: - json.dump(data, json_stream, indent=4) - - -def ls(): - return [] - - -def list_instances(): - return HostContext.get_instances() - - -def update_instances(update_list): - updated_instances = {} - for instance, _changes in update_list: - updated_instances[instance.id] = instance.data_to_store() - - instances = HostContext.get_instances() - for instance_data in instances: - instance_id = 
instance_data["instance_id"] - if instance_id in updated_instances: - new_instance_data = updated_instances[instance_id] - old_keys = set(instance_data.keys()) - new_keys = set(new_instance_data.keys()) - instance_data.update(new_instance_data) - for key in (old_keys - new_keys): - instance_data.pop(key) - - HostContext.save_instances(instances) - - -def remove_instances(instances): - if not isinstance(instances, (tuple, list)): - instances = [instances] - - current_instances = HostContext.get_instances() - for instance in instances: - instance_id = instance.data["instance_id"] - found_idx = None - for idx, _instance in enumerate(current_instances): - if instance_id == _instance["instance_id"]: - found_idx = idx - break - - if found_idx is not None: - current_instances.pop(found_idx) - HostContext.save_instances(current_instances) - - -def get_context_data(): - return HostContext.get_context_data() - - -def update_context_data(data, changes): - HostContext.save_context_data(data) - - -def get_context_title(): - return HostContext.get_context_title() diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py deleted file mode 100644 index 8d59fc3242..0000000000 --- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ /dev/null @@ -1,75 +0,0 @@ -from openpype.lib import NumberDef -from openpype.client import get_asset_by_name -from openpype.pipeline import ( - legacy_io, - AutoCreator, - CreatedInstance, -) -from openpype.hosts.testhost.api import pipeline - - -class MyAutoCreator(AutoCreator): - identifier = "workfile" - family = "workfile" - - def get_instance_attr_defs(self): - output = [ - NumberDef("number_key", label="Number") - ] - return output - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - subset_name = instance_data["subset"] - instance = CreatedInstance( - self.family, subset_name, instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def create(self): - existing_instance = None - for instance in self.create_context.instances: - if instance.family == self.family: - existing_instance = instance - break - - variant = "Main" - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - host_name = legacy_io.Session["AVALON_APP"] - - if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name - ) - data = { - "asset": asset_name, - "task": task_name, - "variant": variant - } - data.update(self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name - )) - - new_instance = CreatedInstance( - self.family, subset_name, data, self - ) - self._add_instance_to_context(new_instance) - - elif ( - existing_instance["asset"] != asset_name - or existing_instance["task"] != task_name - ): - asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name - ) - existing_instance["asset"] = asset_name - existing_instance["task"] = task_name diff --git a/openpype/hosts/testhost/plugins/create/test_creator_1.py b/openpype/hosts/testhost/plugins/create/test_creator_1.py deleted file mode 
100644 index 7664276fa2..0000000000 --- a/openpype/hosts/testhost/plugins/create/test_creator_1.py +++ /dev/null @@ -1,94 +0,0 @@ -import json -from openpype import resources -from openpype.hosts.testhost.api import pipeline -from openpype.lib import ( - UISeparatorDef, - UILabelDef, - BoolDef, - NumberDef, - FileDef, -) -from openpype.pipeline import ( - Creator, - CreatedInstance, -) - - -class TestCreatorOne(Creator): - identifier = "test_one" - label = "test" - family = "test" - description = "Testing creator of testhost" - - create_allow_context_change = False - - def get_icon(self): - return resources.get_openpype_splash_filepath() - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def remove_instances(self, instances): - pipeline.remove_instances(instances) - for instance in instances: - self._remove_instance_from_context(instance) - - def create(self, subset_name, data, pre_create_data): - print("Data that can be used in create:\n{}".format( - json.dumps(pre_create_data, indent=4) - )) - new_instance = CreatedInstance(self.family, subset_name, data, self) - pipeline.HostContext.add_instance(new_instance.data_to_store()) - self.log.info(new_instance.data) - self._add_instance_to_context(new_instance) - - def get_default_variants(self): - return [ - "myVariant", - "variantTwo", - "different_variant" - ] - - def get_instance_attr_defs(self): - output = [ - NumberDef("number_key", label="Number"), - ] - return output - - def get_pre_create_attr_defs(self): - output = [ - BoolDef("use_selection", label="Use selection"), - UISeparatorDef(), - UILabelDef("Testing label"), - FileDef("filepath", folders=True, label="Filepath"), - FileDef( - "filepath_2", multipath=True, folders=True, label="Filepath 2" - ) - ] - return output - - def get_detail_description(self): - return """# Relictus funes est Nyseides currusque nunc oblita - -## Causa sed - -Lorem markdownum posito consumptis, *plebe Amorque*, abstitimus rogatus fictaque -gladium Circe, nos? Bos aeternum quae. Utque me, si aliquem cladis, et vestigia -arbor, sic mea ferre lacrimae agantur prospiciens hactenus. Amanti dentes pete, -vos quid laudemque rastrorumque terras in gratantibus **radix** erat cedemus? - -Pudor tu ponderibus verbaque illa; ire ergo iam Venus patris certe longae -cruentum lecta, et quaeque. Sit doce nox. Anteit ad tempora magni plenaque et -videres mersit sibique auctor in tendunt mittit cunctos ventisque gravitate -volucris quemquam Aeneaden. Pectore Mensis somnus; pectora -[ferunt](http://www.mox.org/oculosbracchia)? Fertilitatis bella dulce et suum? 
- """ diff --git a/openpype/hosts/testhost/plugins/create/test_creator_2.py b/openpype/hosts/testhost/plugins/create/test_creator_2.py deleted file mode 100644 index f54adee8a2..0000000000 --- a/openpype/hosts/testhost/plugins/create/test_creator_2.py +++ /dev/null @@ -1,74 +0,0 @@ -from openpype.lib import NumberDef, TextDef -from openpype.hosts.testhost.api import pipeline -from openpype.pipeline import ( - Creator, - CreatedInstance, -) - - -class TestCreatorTwo(Creator): - identifier = "test_two" - label = "test" - family = "test" - description = "A second testing creator" - - def get_icon(self): - return "cube" - - def create(self, subset_name, data, pre_create_data): - new_instance = CreatedInstance(self.family, subset_name, data, self) - pipeline.HostContext.add_instance(new_instance.data_to_store()) - self.log.info(new_instance.data) - self._add_instance_to_context(new_instance) - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def remove_instances(self, instances): - pipeline.remove_instances(instances) - for instance in instances: - self._remove_instance_from_context(instance) - - def get_instance_attr_defs(self): - output = [ - NumberDef("number_key"), - TextDef("text_key") - ] - return output - - def get_detail_description(self): - return """# Lorem ipsum, dolor sit amet. [![Awesome](https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg)](https://github.com/sindresorhus/awesome) - -> A curated list of awesome lorem ipsum generators. - -Inspired by the [awesome](https://github.com/sindresorhus/awesome) list thing. - - -## Table of Contents - -- [Legend](#legend) -- [Practical](#briefcase-practical) -- [Whimsical](#roller_coaster-whimsical) - - [Animals](#rabbit-animals) - - [Eras](#tophat-eras) - - [Famous Individuals](#sunglasses-famous-individuals) - - [Music](#microphone-music) - - [Food and Drink](#pizza-food-and-drink) - - [Geographic and Dialects](#earth_africa-geographic-and-dialects) - - [Literature](#books-literature) - - [Miscellaneous](#cyclone-miscellaneous) - - [Sports and Fitness](#bicyclist-sports-and-fitness) - - [TV and Film](#movie_camera-tv-and-film) -- [Tools, Apps, and Extensions](#wrench-tools-apps-and-extensions) -- [Contribute](#contribute) -- [TODO](#todo) -""" diff --git a/openpype/hosts/testhost/plugins/publish/collect_context.py b/openpype/hosts/testhost/plugins/publish/collect_context.py deleted file mode 100644 index 0ab98fb84b..0000000000 --- a/openpype/hosts/testhost/plugins/publish/collect_context.py +++ /dev/null @@ -1,34 +0,0 @@ -import pyblish.api - -from openpype.pipeline import ( - OpenPypePyblishPluginMixin, - attribute_definitions -) - - -class CollectContextDataTestHost( - pyblish.api.ContextPlugin, OpenPypePyblishPluginMixin -): - """ - Collecting temp json data sent from a host context - and path for returning json data back to hostself. 
- """ - - label = "Collect Source - Test Host" - order = pyblish.api.CollectorOrder - 0.4 - hosts = ["testhost"] - - @classmethod - def get_attribute_defs(cls): - return [ - attribute_definitions.BoolDef( - "test_bool", - True, - label="Bool input" - ) - ] - - def process(self, context): - # get json paths from os and load them - for instance in context: - instance.data["source"] = "testhost" diff --git a/openpype/hosts/testhost/plugins/publish/collect_instance_1.py b/openpype/hosts/testhost/plugins/publish/collect_instance_1.py deleted file mode 100644 index c7241a15a8..0000000000 --- a/openpype/hosts/testhost/plugins/publish/collect_instance_1.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -import pyblish.api - -from openpype.lib import attribute_definitions -from openpype.pipeline import OpenPypePyblishPluginMixin - - -class CollectInstanceOneTestHost( - pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin -): - """ - Collecting temp json data sent from a host context - and path for returning json data back to hostself. - """ - - label = "Collect Instance 1 - Test Host" - order = pyblish.api.CollectorOrder - 0.3 - hosts = ["testhost"] - - @classmethod - def get_attribute_defs(cls): - return [ - attribute_definitions.NumberDef( - "version", - default=1, - minimum=1, - maximum=999, - decimals=0, - label="Version" - ) - ] - - def process(self, instance): - self._debug_log(instance) - - publish_attributes = instance.data.get("publish_attributes") - if not publish_attributes: - return - - values = publish_attributes.get(self.__class__.__name__) - if not values: - return - - instance.data["version"] = values["version"] - - def _debug_log(self, instance): - def _default_json(value): - return str(value) - - self.log.info( - json.dumps(instance.data, indent=4, default=_default_json) - ) diff --git a/openpype/hosts/testhost/plugins/publish/validate_context_with_error.py b/openpype/hosts/testhost/plugins/publish/validate_context_with_error.py deleted file mode 100644 index 46e996a569..0000000000 --- a/openpype/hosts/testhost/plugins/publish/validate_context_with_error.py +++ /dev/null @@ -1,57 +0,0 @@ -import pyblish.api -from openpype.pipeline import PublishValidationError - - -class ValidateInstanceAssetRepair(pyblish.api.Action): - """Repair the instance asset.""" - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - pass - - -description = """ -## Publish plugins - -### Validate Scene Settings - -#### Skip Resolution Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip resolution check against values from DB. - -#### Skip Timeline Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip `frameStart`, `frameEnd` check against values from DB. - -### AfterEffects Submit to Deadline - -* `Use Published scene` - Set to True (green) when Deadline should take published scene as a source instead of uploaded local one. -* `Priority` - priority of job on farm -* `Primary Pool` - here is list of pool fetched from server you can select from. -* `Secondary Pool` -* `Frames Per Task` - number of sequence division between individual tasks (chunks) -making one job on farm. -""" - - -class ValidateContextWithError(pyblish.api.ContextPlugin): - """Validate the instance asset is the current selected context asset. - - As it might happen that multiple worfiles are opened, switching - between them would mess with selected context. - In that case outputs might be output under wrong asset! 
- - Repair action will use Context asset value (from Workfiles or Launcher) - Closing and reopening with Workfiles will refresh Context value. - """ - - label = "Validate Context With Error" - hosts = ["testhost"] - actions = [ValidateInstanceAssetRepair] - order = pyblish.api.ValidatorOrder - - def process(self, context): - raise PublishValidationError("Crashing", "Context error", description) diff --git a/openpype/hosts/testhost/plugins/publish/validate_with_error.py b/openpype/hosts/testhost/plugins/publish/validate_with_error.py deleted file mode 100644 index 5a2888a8b0..0000000000 --- a/openpype/hosts/testhost/plugins/publish/validate_with_error.py +++ /dev/null @@ -1,57 +0,0 @@ -import pyblish.api -from openpype.pipeline import PublishValidationError - - -class ValidateInstanceAssetRepair(pyblish.api.Action): - """Repair the instance asset.""" - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - pass - - -description = """ -## Publish plugins - -### Validate Scene Settings - -#### Skip Resolution Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip resolution check against values from DB. - -#### Skip Timeline Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip `frameStart`, `frameEnd` check against values from DB. - -### AfterEffects Submit to Deadline - -* `Use Published scene` - Set to True (green) when Deadline should take published scene as a source instead of uploaded local one. -* `Priority` - priority of job on farm -* `Primary Pool` - here is list of pool fetched from server you can select from. -* `Secondary Pool` -* `Frames Per Task` - number of sequence division between individual tasks (chunks) -making one job on farm. -""" - - -class ValidateWithError(pyblish.api.InstancePlugin): - """Validate the instance asset is the current selected context asset. - - As it might happen that multiple worfiles are opened, switching - between them would mess with selected context. - In that case outputs might be output under wrong asset! - - Repair action will use Context asset value (from Workfiles or Launcher) - Closing and reopening with Workfiles will refresh Context value. 
- """ - - label = "Validate With Error" - hosts = ["testhost"] - actions = [ValidateInstanceAssetRepair] - order = pyblish.api.ValidatorOrder - - def process(self, instance): - raise PublishValidationError("Crashing", "Instance error", description) diff --git a/openpype/hosts/testhost/run_publish.py b/openpype/hosts/testhost/run_publish.py deleted file mode 100644 index c7ad63aafd..0000000000 --- a/openpype/hosts/testhost/run_publish.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import sys - -mongo_url = "" -project_name = "" -asset_name = "" -task_name = "" -ftrack_url = "" -ftrack_username = "" -ftrack_api_key = "" - - -def multi_dirname(path, times=1): - for _ in range(times): - path = os.path.dirname(path) - return path - - -host_name = "testhost" -current_file = os.path.abspath(__file__) -openpype_dir = multi_dirname(current_file, 4) - -os.environ["OPENPYPE_MONGO"] = mongo_url -os.environ["OPENPYPE_ROOT"] = openpype_dir -os.environ["AVALON_PROJECT"] = project_name -os.environ["AVALON_ASSET"] = asset_name -os.environ["AVALON_TASK"] = task_name -os.environ["AVALON_APP"] = host_name -os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" -os.environ["AVALON_TIMEOUT"] = "1000" -os.environ["AVALON_DB"] = "avalon" -os.environ["FTRACK_SERVER"] = ftrack_url -os.environ["FTRACK_API_USER"] = ftrack_username -os.environ["FTRACK_API_KEY"] = ftrack_api_key -for path in [ - openpype_dir, - r"{}\repos\avalon-core".format(openpype_dir), - r"{}\.venv\Lib\site-packages".format(openpype_dir) -]: - sys.path.append(path) - -from Qt import QtWidgets, QtCore - -from openpype.tools.publisher.window import PublisherWindow - - -def main(): - """Main function for testing purposes.""" - import pyblish.api - from openpype.pipeline import install_host - from openpype.modules import ModulesManager - from openpype.hosts.testhost import api as testhost - - manager = ModulesManager() - for plugin_path in manager.collect_plugin_paths()["publish"]: - pyblish.api.register_plugin_path(plugin_path) - - install_host(testhost) - - QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling) - app = QtWidgets.QApplication([]) - window = PublisherWindow() - window.show() - app.exec_() - - -if __name__ == "__main__": - main() diff --git a/openpype/hosts/traypublisher/__init__.py b/openpype/hosts/traypublisher/__init__.py index 4eb7bf3eef..77ba908ddd 100644 --- a/openpype/hosts/traypublisher/__init__.py +++ b/openpype/hosts/traypublisher/__init__.py @@ -1,6 +1,6 @@ -from .module import TrayPublishModule +from .addon import TrayPublishAddon __all__ = ( - "TrayPublishModule", + "TrayPublishAddon", ) diff --git a/openpype/hosts/traypublisher/module.py b/openpype/hosts/traypublisher/addon.py similarity index 82% rename from openpype/hosts/traypublisher/module.py rename to openpype/hosts/traypublisher/addon.py index 92a2312fec..c86c835ed9 100644 --- a/openpype/hosts/traypublisher/module.py +++ b/openpype/hosts/traypublisher/addon.py @@ -5,15 +5,15 @@ import click from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ITrayAction, IHostModule +from openpype.modules.interfaces import ITrayAction, IHostAddon TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): +class TrayPublishAddon(OpenPypeModule, IHostAddon, ITrayAction): label = "New Publish (beta)" - name = "traypublish_tool" - host_name = "traypublish" + name = 
"traypublisher" + host_name = "traypublisher" def initialize(self, modules_settings): self.enabled = True @@ -28,7 +28,7 @@ class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): self._experimental_tools = ExperimentalTools() def tray_menu(self, *args, **kwargs): - super(TrayPublishModule, self).tray_menu(*args, **kwargs) + super(TrayPublishAddon, self).tray_menu(*args, **kwargs) traypublisher = self._experimental_tools.get("traypublisher") visible = False if traypublisher and traypublisher.enabled: @@ -53,7 +53,7 @@ class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): click_group.add_command(cli_main) -@click.group(TrayPublishModule.name, help="TrayPublisher related commands.") +@click.group(TrayPublishAddon.name, help="TrayPublisher related commands.") def cli_main(): pass diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py index 2d9db7801e..0a8ddaa343 100644 --- a/openpype/hosts/traypublisher/api/pipeline.py +++ b/openpype/hosts/traypublisher/api/pipeline.py @@ -9,7 +9,7 @@ from openpype.pipeline import ( register_creator_plugin_path, legacy_io, ) -from openpype.host import HostBase, INewPublisher +from openpype.host import HostBase, IPublishHost ROOT_DIR = os.path.dirname(os.path.dirname( @@ -19,7 +19,7 @@ PUBLISH_PATH = os.path.join(ROOT_DIR, "plugins", "publish") CREATE_PATH = os.path.join(ROOT_DIR, "plugins", "create") -class TrayPublisherHost(HostBase, INewPublisher): +class TrayPublisherHost(HostBase, IPublishHost): name = "traypublisher" def install(self): diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index a3eead51c8..555041d389 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -11,35 +11,33 @@ from .pipeline import ( remove_instances, HostContext, ) +from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS + -IMAGE_EXTENSIONS = [ - ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", - ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", - ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", - ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", - ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr", - ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", - ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", - ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", - ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", - ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", - ".xpm", ".xwd" -] -VIDEO_EXTENSIONS = [ - ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b", - ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v", - ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg", - ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb", - ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv" -] REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS +def _cache_and_get_instances(creator): + """Cache instances in shared data. + + Args: + creator (Creator): Plugin which would like to get instances from host. + + Returns: + List[Dict[str, Any]]: Cached instances list from host implementation. 
+ """ + + shared_key = "openpype.traypublisher.instances" + if shared_key not in creator.collection_shared_data: + creator.collection_shared_data[shared_key] = list_instances() + return creator.collection_shared_data[shared_key] + + class HiddenTrayPublishCreator(HiddenCreator): host_name = "traypublisher" def collect_instances(self): - for instance_data in list_instances(): + for instance_data in _cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: instance = CreatedInstance.from_existing( @@ -76,7 +74,7 @@ class TrayPublishCreator(Creator): host_name = "traypublisher" def collect_instances(self): - for instance_data in list_instances(): + for instance_data in _cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: instance = CreatedInstance.from_existing( @@ -104,6 +102,8 @@ class TrayPublishCreator(Creator): # Host implementation of storing metadata about instance HostContext.add_instance(new_instance.data_to_store()) + new_instance.mark_as_stored() + # Add instance to current context self._add_instance_to_context(new_instance) diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index 41c1c29bb0..df6253b0c2 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -1,5 +1,6 @@ import os -from openpype.api import get_project_settings, Logger +from openpype.lib import Logger +from openpype.settings import get_project_settings log = Logger.get_logger(__name__) diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index c5f0d6b75e..cf25a37918 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -6,13 +6,15 @@ from openpype.client import get_assets, get_asset_by_name from openpype.lib import ( FileDef, BoolDef, - get_subset_name_with_asset_doc, - TaskNotSetError, ) from openpype.pipeline import ( CreatedInstance, CreatorError ) +from openpype.pipeline.create import ( + get_subset_name, + TaskNotSetError, +) from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator @@ -130,7 +132,7 @@ class BatchMovieCreator(TrayPublishCreator): task_name = self._get_task_name(asset_doc) try: - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.family, variant, task_name, @@ -143,7 +145,7 @@ class BatchMovieCreator(TrayPublishCreator): # but user have ability to change it # NOTE: This expect that there is not task 'Undefined' on asset task_name = "Undefined" - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.family, variant, task_name, @@ -186,6 +188,7 @@ class BatchMovieCreator(TrayPublishCreator): folders=False, single_item=False, extensions=self.extensions, + allow_sequences=False, label="Filepath" ), BoolDef( diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py index f37e04d1c9..3d93e2c927 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py @@ -35,12 +35,12 @@ class CollectMovieBatch( "stagingDir": 
os.path.dirname(file_url), + "tags": [] } + instance.data["representations"].append(repre) if creator_attributes["add_review_family"]: repre["tags"].append("review") instance.data["families"].append("review") - - instance.data["representations"].append(repre) + instance.data["thumbnailSource"] = file_url instance.data["source"] = file_url diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index c0ae694c3c..7035a61d7b 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -1,5 +1,6 @@ import os import tempfile +from pathlib import Path import clique import pyblish.api @@ -69,9 +70,17 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): repre_names, representation_files_mapping ) - + source_filepaths = list(set(source_filepaths)) instance.data["source"] = source - instance.data["sourceFilepaths"] = list(set(source_filepaths)) + instance.data["sourceFilepaths"] = source_filepaths + + # NOTE: Missing filepaths should not cause crashes (at least not here) + # - if filepaths are required they should crash on validation + if source_filepaths: + # NOTE: Original basename does not handle sequences + # - we should maybe not fill the key when a sequence is used? + origin_basename = Path(source_filepaths[0]).stem + instance.data["originalBasename"] = origin_basename self.log.debug( ( @@ -148,8 +157,11 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): )) return + item_dir = review_file_item["directory"] + first_filepath = os.path.join(item_dir, filenames[0]) + filepaths = { - os.path.join(review_file_item["directory"], filename) + os.path.join(item_dir, filename) for filename in filenames } source_filepaths.extend(filepaths) @@ -176,6 +188,8 @@ if "review" not in instance.data["families"]: instance.data["families"].append("review") + instance.data["thumbnailSource"] = first_filepath + review_representation["tags"].append("review") self.log.debug("Representation {} was marked for review. {}".format( review_representation["name"], review_path diff --git a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py new file mode 100644 index 0000000000..7781bb7b3e --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py @@ -0,0 +1,173 @@ +"""Create instance thumbnail from "thumbnailSource" on 'instance.data'. + +Output is a new representation named "thumbnail" on the instance. If the +instance already has such a representation, the process is skipped. + +This way a collector can point to a file from which the thumbnail should be +generated. This is a different approach than what the global plugin for +thumbnails does. The global plugin has specific logic which does not support + +Todos: + No size handling. The size of the input is used for the output thumbnail, + which can cause issues. +""" + +import os +import tempfile + +import pyblish.api +from openpype.lib import ( + get_ffmpeg_tool_path, + get_oiio_tools_path, + is_oiio_supported, + + run_subprocess, +) + + +class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): + """Create jpg thumbnail for instance based on 'thumbnailSource'. + + Thumbnail source must be a single image or video filepath. 
+ """ + + label = "Extract Thumbnail (from source)" + # Before 'ExtractThumbnail' in global plugins + order = pyblish.api.ExtractorOrder - 0.00001 + hosts = ["traypublisher"] + + def process(self, instance): + subset_name = instance.data["subset"] + self.log.info( + "Processing instance with subset name {}".format(subset_name) + ) + + thumbnail_source = instance.data.get("thumbnailSource") + if not thumbnail_source: + self.log.debug("Thumbnail source not filled. Skipping.") + return + + elif not os.path.exists(thumbnail_source): + self.log.debug( + "Thumbnail source file was not found {}. Skipping.".format( + thumbnail_source)) + return + + # Check if already has thumbnail created + if self._already_has_thumbnail(instance): + self.log.info("Thumbnail representation already present.") + return + + # Create temp directory for thumbnail + # - this is to avoid "override" of source file + dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") + self.log.debug( + "Create temp directory {} for thumbnail".format(dst_staging) + ) + # Store new staging to cleanup paths + instance.context.data["cleanupFullPaths"].append(dst_staging) + + thumbnail_created = False + oiio_supported = is_oiio_supported() + + self.log.info("Thumbnail source: {}".format(thumbnail_source)) + src_basename = os.path.basename(thumbnail_source) + dst_filename = os.path.splitext(src_basename)[0] + ".jpg" + full_output_path = os.path.join(dst_staging, dst_filename) + + if oiio_supported: + self.log.info("Trying to convert with OIIO") + # If the input can read by OIIO then use OIIO method for + # conversion otherwise use ffmpeg + thumbnail_created = self.create_thumbnail_oiio( + thumbnail_source, full_output_path + ) + + # Try to use FFMPEG if OIIO is not supported or for cases when + # oiiotool isn't available + if not thumbnail_created: + if oiio_supported: + self.log.info(( + "Converting with FFMPEG because input" + " can't be read by OIIO." 
+ )) + + thumbnail_created = self.create_thumbnail_ffmpeg( + thumbnail_source, full_output_path + ) + + # Skip if the thumbnail wasn't created + if not thumbnail_created: + self.log.warning("Thumbnail has not been created.") + return + + new_repre = { + "name": "thumbnail", + "ext": "jpg", + "files": dst_filename, + "stagingDir": dst_staging, + "thumbnail": True, + "tags": ["thumbnail"] + } + + # adding representation + self.log.debug( + "Adding thumbnail representation: {}".format(new_repre) + ) + instance.data["representations"].append(new_repre) + + def _already_has_thumbnail(self, instance): + if "representations" not in instance.data: + self.log.warning( + "Instance does not have 'representations' key filled" + ) + instance.data["representations"] = [] + + for repre in instance.data["representations"]: + if repre["name"] == "thumbnail": + return True + return False + + def create_thumbnail_oiio(self, src_path, dst_path): + self.log.info("outputting {}".format(dst_path)) + oiio_tool_path = get_oiio_tools_path() + oiio_cmd = [ + oiio_tool_path, + "-a", src_path, + "-o", dst_path + ] + self.log.info("Running: {}".format(" ".join(oiio_cmd))) + try: + run_subprocess(oiio_cmd, logger=self.log) + return True + except Exception: + self.log.warning( + "Failed to create thumbnail using oiiotool", + exc_info=True + ) + return False + + def create_thumbnail_ffmpeg(self, src_path, dst_path): + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") + + max_int = str(2147483647) + ffmpeg_cmd = [ + ffmpeg_path, + "-y", + "-analyzeduration", max_int, + "-probesize", max_int, + "-i", src_path, + "-vframes", "1", + dst_path + ] + + self.log.info("Running: {}".format(" ".join(ffmpeg_cmd))) + try: + run_subprocess(ffmpeg_cmd, logger=self.log) + return True + except Exception: + self.log.warning( + "Failed to create thumbnail using ffmpeg", + exc_info=True + ) + return False diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 947624100a..b962ea464a 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -2,10 +2,10 @@ import re import pyblish.api -import openpype.api -from openpype.pipeline import ( +from openpype.pipeline.publish import ( + ValidateContentsOrder, PublishXmlValidationError, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, ) @@ -16,7 +16,7 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, label = "Validate Frame Range" hosts = ["traypublisher"] families = ["render"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder optional = True # published data might be sequence (.mov, .mp4) in that counting files diff --git a/openpype/hosts/tvpaint/__init__.py b/openpype/hosts/tvpaint/__init__.py index 09b7c52cd1..b98680f204 100644 --- a/openpype/hosts/tvpaint/__init__.py +++ b/openpype/hosts/tvpaint/__init__.py @@ -1,20 +1,12 @@ -import os +from .addon import ( + get_launch_script_path, + TVPaintAddon, + TVPAINT_ROOT_DIR, +) -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value - - -def get_launch_script_path(): - current_dir = os.path.dirname(os.path.abspath(__file__)) - return os.path.join( - current_dir, - "api", - "launch_script.py" - ) +__all__ = 
( + "get_launch_script_path", + "TVPaintAddon", + "TVPAINT_ROOT_DIR", +) diff --git a/openpype/hosts/tvpaint/addon.py b/openpype/hosts/tvpaint/addon.py new file mode 100644 index 0000000000..d710e63f93 --- /dev/null +++ b/openpype/hosts/tvpaint/addon.py @@ -0,0 +1,41 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def get_launch_script_path(): + return os.path.join( + TVPAINT_ROOT_DIR, + "api", + "launch_script.py" + ) + + +class TVPaintAddon(OpenPypeModule, IHostAddon): + name = "tvpaint" + host_name = "tvpaint" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(TVPAINT_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".tvpp"] diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index c461b33f4b..7b53aad9a4 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -1,49 +1,11 @@ from .communication_server import CommunicationWrapper -from . import lib -from . import launch_script -from . import workio -from . import pipeline -from . import plugin from .pipeline import ( - install, - uninstall, - maintained_selection, - remove_instance, - list_instances, - ls -) - -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root, + TVPaintHost, ) __all__ = ( "CommunicationWrapper", - "lib", - "launch_script", - "workio", - "pipeline", - "plugin", - - "install", - "uninstall", - "maintained_selection", - "remove_instance", - "list_instances", - "ls", - - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root" + "TVPaintHost", ) diff --git a/openpype/hosts/tvpaint/api/launch_script.py b/openpype/hosts/tvpaint/api/launch_script.py index 0b25027fc6..c474a10529 100644 --- a/openpype/hosts/tvpaint/api/launch_script.py +++ b/openpype/hosts/tvpaint/api/launch_script.py @@ -10,10 +10,10 @@ from Qt import QtWidgets, QtCore, QtGui from openpype import style from openpype.pipeline import install_host -from openpype.hosts.tvpaint.api.communication_server import ( - CommunicationWrapper +from openpype.hosts.tvpaint.api import ( + TVPaintHost, + CommunicationWrapper, ) -from openpype.hosts.tvpaint import api as tvpaint_host log = logging.getLogger(__name__) @@ -30,6 +30,7 @@ def main(launch_args): # - QApplicaiton is also main thread/event loop of the server qt_app = QtWidgets.QApplication([]) + tvpaint_host = TVPaintHost() # Execute pipeline installation install_host(tvpaint_host) diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py index a341f48859..5e64773b8e 100644 --- a/openpype/hosts/tvpaint/api/lib.py +++ b/openpype/hosts/tvpaint/api/lib.py @@ -2,7 +2,7 @@ import os import logging import tempfile -from . 
import CommunicationWrapper +from .communication_server import CommunicationWrapper log = logging.getLogger(__name__) diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 0118c0104b..249326791b 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -1,6 +1,5 @@ import os import json -import contextlib import tempfile import logging @@ -9,15 +8,14 @@ import requests import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.hosts import tvpaint -from openpype.api import get_current_project_settings +from openpype.host import HostBase, IWorkfileHost, ILoadHost +from openpype.hosts.tvpaint import TVPAINT_ROOT_DIR +from openpype.settings import get_current_project_settings from openpype.lib import register_event_callback from openpype.pipeline import ( legacy_io, register_loader_plugin_path, register_creator_plugin_path, - deregister_loader_plugin_path, - deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) @@ -28,11 +26,6 @@ from .lib import ( log = logging.getLogger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(tvpaint.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") METADATA_SECTION = "avalon" SECTION_NAME_CONTEXT = "context" @@ -65,43 +58,152 @@ instances=2 """ -def install(): - """Install TVPaint-specific functionality.""" +class TVPaintHost(HostBase, IWorkfileHost, ILoadHost): + name = "tvpaint" - log.info("OpenPype - Installing TVPaint integration") - legacy_io.install() + def install(self): + """Install TVPaint-specific functionality.""" - # Create workdir folder if does not exist yet - workdir = legacy_io.Session["AVALON_WORKDIR"] - if not os.path.exists(workdir): - os.makedirs(workdir) + log.info("OpenPype - Installing TVPaint integration") + legacy_io.install() - pyblish.api.register_host("tvpaint") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) + # Create workdir folder if does not exist yet + workdir = legacy_io.Session["AVALON_WORKDIR"] + if not os.path.exists(workdir): + os.makedirs(workdir) - registered_callbacks = ( - pyblish.api.registered_callbacks().get("instanceToggled") or [] - ) - if on_instance_toggle not in registered_callbacks: - pyblish.api.register_callback("instanceToggled", on_instance_toggle) + plugins_dir = os.path.join(TVPAINT_ROOT_DIR, "plugins") + publish_dir = os.path.join(plugins_dir, "publish") + load_dir = os.path.join(plugins_dir, "load") + create_dir = os.path.join(plugins_dir, "create") - register_event_callback("application.launched", initial_launch) - register_event_callback("application.exit", application_exit) + pyblish.api.register_host("tvpaint") + pyblish.api.register_plugin_path(publish_dir) + register_loader_plugin_path(load_dir) + register_creator_plugin_path(create_dir) + registered_callbacks = ( + pyblish.api.registered_callbacks().get("instanceToggled") or [] + ) + if self.on_instance_toggle not in registered_callbacks: + pyblish.api.register_callback( + "instanceToggled", self.on_instance_toggle + ) -def uninstall(): - """Uninstall TVPaint-specific functionality. 
+        register_event_callback("application.launched", self.initial_launch)
+        register_event_callback("application.exit", self.application_exit)
 
-    This function is called automatically on calling `uninstall_host()`.
-    """
+    def open_workfile(self, filepath):
+        george_script = "tv_LoadProject '\"'\"{}\"'\"'".format(
+            filepath.replace("\\", "/")
+        )
+        return execute_george_through_file(george_script)
 
-    log.info("OpenPype - Uninstalling TVPaint integration")
-    pyblish.api.deregister_host("tvpaint")
-    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
-    deregister_loader_plugin_path(LOAD_PATH)
-    deregister_creator_plugin_path(CREATE_PATH)
+    def save_workfile(self, filepath=None):
+        if not filepath:
+            filepath = self.get_current_workfile()
+        context = {
+            "project": legacy_io.Session["AVALON_PROJECT"],
+            "asset": legacy_io.Session["AVALON_ASSET"],
+            "task": legacy_io.Session["AVALON_TASK"]
+        }
+        save_current_workfile_context(context)
+
+        # Execute george script to save workfile.
+        george_script = "tv_SaveProject {}".format(filepath.replace("\\", "/"))
+        return execute_george(george_script)
+
+    def work_root(self, session):
+        return session["AVALON_WORKDIR"]
+
+    def get_current_workfile(self):
+        return execute_george("tv_GetProjectName")
+
+    def workfile_has_unsaved_changes(self):
+        return None
+
+    def get_workfile_extensions(self):
+        return [".tvpp"]
+
+    def get_containers(self):
+        return get_containers()
+
+    def initial_launch(self):
+        # Setup project settings if it's the template that's launched.
+        # TODO also check for template creation when it's possible to define
+        # templates
+        last_workfile = os.environ.get("AVALON_LAST_WORKFILE")
+        if not last_workfile or os.path.exists(last_workfile):
+            return
+
+        log.info("Setting up project...")
+        set_context_settings()
+
+    def remove_instance(self, instance):
+        """Remove instance from current workfile metadata.
+
+        Implementation for Subset manager tool.
+        """
+
+        current_instances = get_workfile_metadata(SECTION_NAME_INSTANCES)
+        instance_id = instance.get("uuid")
+        found_idx = None
+        if instance_id:
+            for idx, _inst in enumerate(current_instances):
+                if _inst["uuid"] == instance_id:
+                    found_idx = idx
+                    break
+
+        if found_idx is None:
+            return
+        current_instances.pop(found_idx)
+        write_instances(current_instances)
+
+    def application_exit(self):
+        """Logic related to TimerManager.
+
+        Todo:
+            This should be handled out of TVPaint integration logic.
+        """
+
+        data = get_current_project_settings()
+        stop_timer = data["tvpaint"]["stop_timer_on_application_exit"]
+
+        if not stop_timer:
+            return
+
+        # Stop application timer.
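+        # (Assumes OPENPYPE_WEBSERVER_URL was exported by the OpenPype tray
+        # webserver at launch; "/timers_manager/stop_timer" is the timers
+        # manager endpoint that server exposes.)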
+        webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
+        rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url)
+        requests.post(rest_api_url)
+
+    def on_instance_toggle(self, instance, old_value, new_value):
+        """Update instance data in workfile on publish toggle."""
+        # Review may not have real instance in workfile metadata
+        if not instance.data.get("uuid"):
+            return
+
+        instance_id = instance.data["uuid"]
+        found_idx = None
+        current_instances = list_instances()
+        for idx, workfile_instance in enumerate(current_instances):
+            if workfile_instance["uuid"] == instance_id:
+                found_idx = idx
+                break
+
+        if found_idx is None:
+            return
+
+        if "active" in current_instances[found_idx]:
+            current_instances[found_idx]["active"] = new_value
+            self.write_instances(current_instances)
+
+    def list_instances(self):
+        """List all created instances from current workfile."""
+        return list_instances()
+
+    def write_instances(self, data):
+        return write_instances(data)
 
 
 def containerise(
@@ -131,7 +233,7 @@ def containerise(
         "representation": str(context["representation"]["_id"])
     }
     if current_containers is None:
-        current_containers = ls()
+        current_containers = get_containers()
 
     # Add container to containers list
     current_containers.append(container_data)
@@ -142,15 +244,6 @@ def containerise(
     return container_data
 
 
-@contextlib.contextmanager
-def maintained_selection():
-    # TODO implement logic
-    try:
-        yield
-    finally:
-        pass
-
-
 def split_metadata_string(text, chunk_length=None):
     """Split string by length.
@@ -348,23 +441,6 @@ def save_current_workfile_context(context):
     return write_workfile_metadata(SECTION_NAME_CONTEXT, context)
 
 
-def remove_instance(instance):
-    """Remove instance from current workfile metadata."""
-    current_instances = get_workfile_metadata(SECTION_NAME_INSTANCES)
-    instance_id = instance.get("uuid")
-    found_idx = None
-    if instance_id:
-        for idx, _inst in enumerate(current_instances):
-            if _inst["uuid"] == instance_id:
-                found_idx = idx
-                break
-
-    if found_idx is None:
-        return
-    current_instances.pop(found_idx)
-    write_instances(current_instances)
-
-
 def list_instances():
     """List all created instances from current workfile."""
     return get_workfile_metadata(SECTION_NAME_INSTANCES)
@@ -374,12 +450,7 @@ def write_instances(data):
     return write_workfile_metadata(SECTION_NAME_INSTANCES, data)
 
 
-# Backwards compatibility
-def _write_instances(*args, **kwargs):
-    return write_instances(*args, **kwargs)
-
-
-def ls():
+def get_containers():
     output = get_workfile_metadata(SECTION_NAME_CONTAINERS)
     if output:
         for item in output:
@@ -391,53 +462,6 @@ def ls():
     return output
 
 
-def on_instance_toggle(instance, old_value, new_value):
-    """Update instance data in workfile on publish toggle."""
-    # Review may not have real instance in wokrfile metadata
-    if not instance.data.get("uuid"):
-        return
-
-    instance_id = instance.data["uuid"]
-    found_idx = None
-    current_instances = list_instances()
-    for idx, workfile_instance in enumerate(current_instances):
-        if workfile_instance["uuid"] == instance_id:
-            found_idx = idx
-            break
-
-    if found_idx is None:
-        return
-
-    if "active" in current_instances[found_idx]:
-        current_instances[found_idx]["active"] = new_value
-        write_instances(current_instances)
-
-
-def initial_launch():
-    # Setup project settings if its the template that's launched.
- # TODO also check for template creation when it's possible to define - # templates - last_workfile = os.environ.get("AVALON_LAST_WORKFILE") - if not last_workfile or os.path.exists(last_workfile): - return - - log.info("Setting up project...") - set_context_settings() - - -def application_exit(): - data = get_current_project_settings() - stop_timer = data["tvpaint"]["stop_timer_on_application_exit"] - - if not stop_timer: - return - - # Stop application timer. - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) - requests.post(rest_api_url) - - def set_context_settings(asset_doc=None): """Set workfile settings by asset document data. diff --git a/openpype/hosts/tvpaint/api/plugin.py b/openpype/hosts/tvpaint/api/plugin.py index 15ad8905e0..da456e7067 100644 --- a/openpype/hosts/tvpaint/api/plugin.py +++ b/openpype/hosts/tvpaint/api/plugin.py @@ -4,11 +4,11 @@ import uuid from openpype.pipeline import ( LegacyCreator, LoaderPlugin, + registered_host, ) -from openpype.hosts.tvpaint.api import ( - pipeline, - lib -) + +from .lib import get_layers_data +from .pipeline import get_current_workfile_context class Creator(LegacyCreator): @@ -22,7 +22,7 @@ class Creator(LegacyCreator): dynamic_data = super(Creator, cls).get_dynamic_data(*args, **kwargs) # Change asset and name by current workfile context - workfile_context = pipeline.get_current_workfile_context() + workfile_context = get_current_workfile_context() asset_name = workfile_context.get("asset") task_name = workfile_context.get("task") if "asset" not in dynamic_data and asset_name: @@ -67,10 +67,12 @@ class Creator(LegacyCreator): self.log.debug( "Storing instance data to workfile. {}".format(str(data)) ) - return pipeline.write_instances(data) + host = registered_host() + return host.write_instances(data) def process(self): - data = pipeline.list_instances() + host = registered_host() + data = host.list_instances() data.append(self.data) self.write_instances(data) @@ -108,7 +110,7 @@ class Loader(LoaderPlugin): counter_regex = re.compile(r"_(\d{3})$") higher_counter = 0 - for layer in lib.get_layers_data(): + for layer in get_layers_data(): layer_name = layer["name"] if not layer_name.startswith(layer_name_base): continue diff --git a/openpype/hosts/tvpaint/api/workio.py b/openpype/hosts/tvpaint/api/workio.py deleted file mode 100644 index 1a5ad00ca8..0000000000 --- a/openpype/hosts/tvpaint/api/workio.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Host API required for Work Files. -# TODO @iLLiCiT implement functions: - has_unsaved_changes -""" - -from openpype.pipeline import ( - HOST_WORKFILE_EXTENSIONS, - legacy_io, -) -from .lib import ( - execute_george, - execute_george_through_file -) -from .pipeline import save_current_workfile_context - - -def open_file(filepath): - """Open the scene file in Blender.""" - george_script = "tv_LoadProject '\"'\"{}\"'\"'".format( - filepath.replace("\\", "/") - ) - return execute_george_through_file(george_script) - - -def save_file(filepath): - """Save the open scene file.""" - # Store context to workfile before save - context = { - "project": legacy_io.Session["AVALON_PROJECT"], - "asset": legacy_io.Session["AVALON_ASSET"], - "task": legacy_io.Session["AVALON_TASK"] - } - save_current_workfile_context(context) - - # Execute george script to save workfile. 
-    george_script = "tv_SaveProject {}".format(filepath.replace("\\", "/"))
-    return execute_george(george_script)
-
-
-def current_file():
-    """Return the path of the open scene file."""
-    george_script = "tv_GetProjectName"
-    return execute_george(george_script)
-
-
-def has_unsaved_changes():
-    """Does the open scene file have unsaved changes?"""
-    return False
-
-
-def file_extensions():
-    """Return the supported file extensions for Blender scene files."""
-    return HOST_WORKFILE_EXTENSIONS["tvpaint"]
-
-
-def work_root(session):
-    """Return the default root to browse for work files."""
-    return session["AVALON_WORKDIR"]
diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py
index c67ab1e4fb..95653b6ecb 100644
--- a/openpype/hosts/tvpaint/lib.py
+++ b/openpype/hosts/tvpaint/lib.py
@@ -646,9 +646,6 @@ def rename_filepaths_by_frame_start(
     filepaths_by_frame, range_start, range_end, new_frame_start
 ):
     """Change frames in filenames of finished images to new frame start."""
-    # Skip if source first frame is same as destination first frame
-    if range_start == new_frame_start:
-        return
 
     # Calculate frame end
     new_frame_end = range_end + (new_frame_start - range_start)
@@ -669,14 +666,17 @@ def rename_filepaths_by_frame_start(
     source_range = range(range_start, range_end + 1)
     output_range = range(new_frame_start, new_frame_end + 1)
 
+    # Rename only when source and destination filenames differ
     new_dst_filepaths = {}
     for src_frame, dst_frame in zip(source_range, output_range):
-        src_filepath = filepaths_by_frame[src_frame]
-        src_dirpath = os.path.dirname(src_filepath)
+        src_filepath = os.path.normpath(filepaths_by_frame[src_frame])
+        dirpath, src_filename = os.path.split(src_filepath)
         dst_filename = filename_template.format(frame=dst_frame)
-        dst_filepath = os.path.join(src_dirpath, dst_filename)
+        dst_filepath = os.path.join(dirpath, dst_filename)
 
-        os.rename(src_filepath, dst_filepath)
+        if src_filename != dst_filename:
+            os.rename(src_filepath, dst_filepath)
         new_dst_filepaths[dst_frame] = dst_filepath
+
     return new_dst_filepaths
diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py
index 3b5bd47189..a085830e96 100644
--- a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py
+++ b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py
@@ -1,11 +1,15 @@
-from openpype.pipeline import CreatorError
 from openpype.lib import prepare_template_data
+from openpype.pipeline import CreatorError
 from openpype.hosts.tvpaint.api import (
     plugin,
-    pipeline,
-    lib,
     CommunicationWrapper
 )
+from openpype.hosts.tvpaint.api.lib import (
+    get_layers_data,
+    get_groups_data,
+    execute_george_through_file,
+)
+from openpype.hosts.tvpaint.api.pipeline import list_instances
 
 
 class CreateRenderlayer(plugin.Creator):
@@ -63,7 +67,7 @@ class CreateRenderlayer(plugin.Creator):
         # Validate that communication is initialized
         if CommunicationWrapper.communicator:
             # Get currently selected layers
-            layers_data = lib.get_layers_data()
+            layers_data = get_layers_data()
 
             selected_layers = [
                 layer
@@ -81,8 +85,8 @@ class CreateRenderlayer(plugin.Creator):
 
     def process(self):
         self.log.debug("Query data from workfile.")
-        instances = pipeline.list_instances()
-        layers_data = lib.get_layers_data()
+        instances = list_instances()
+        layers_data = get_layers_data()
 
         self.log.debug("Checking for selection groups.")
         # Collect group ids from selection
@@ -109,7 +113,7 @@ class CreateRenderlayer(plugin.Creator):
self.log.debug(f"Selected group id is \"{group_id}\".") self.data["group_id"] = group_id - group_data = lib.get_groups_data() + group_data = get_groups_data() group_name = None for group in group_data: if group["group_id"] == group_id: @@ -176,7 +180,7 @@ class CreateRenderlayer(plugin.Creator): return self.log.debug("Querying groups data from workfile.") - groups_data = lib.get_groups_data() + groups_data = get_groups_data() self.log.debug("Changing name of the group.") selected_group = None @@ -195,7 +199,7 @@ class CreateRenderlayer(plugin.Creator): b=selected_group["blue"], name=new_group_name ) - lib.execute_george_through_file(rename_script) + execute_george_through_file(rename_script) self.log.info( f"Name of group with index {group_id}" diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py index 26fa8ac51a..a44cb29f20 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py @@ -2,10 +2,10 @@ from openpype.pipeline import CreatorError from openpype.lib import prepare_template_data from openpype.hosts.tvpaint.api import ( plugin, - pipeline, - lib, CommunicationWrapper ) +from openpype.hosts.tvpaint.api.lib import get_layers_data +from openpype.hosts.tvpaint.api.pipeline import list_instances class CreateRenderPass(plugin.Creator): @@ -54,7 +54,7 @@ class CreateRenderPass(plugin.Creator): # Validate that communication is initialized if CommunicationWrapper.communicator: # Get currently selected layers - layers_data = lib.layers_data() + layers_data = get_layers_data() selected_layers = [ layer @@ -72,8 +72,8 @@ class CreateRenderPass(plugin.Creator): def process(self): self.log.debug("Query data from workfile.") - instances = pipeline.list_instances() - layers_data = lib.layers_data() + instances = list_instances() + layers_data = get_layers_data() self.log.debug("Checking selection.") # Get all selected layers and their group ids diff --git a/openpype/hosts/tvpaint/plugins/load/load_image.py b/openpype/hosts/tvpaint/plugins/load/load_image.py index f861d0119e..151db94135 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_image.py @@ -1,5 +1,6 @@ import qargparse -from openpype.hosts.tvpaint.api import lib, plugin +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import execute_george_through_file class ImportImage(plugin.Loader): @@ -79,4 +80,4 @@ class ImportImage(plugin.Loader): layer_name, load_options_str ) - return lib.execute_george_through_file(george_script) + return execute_george_through_file(george_script) diff --git a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py index af1a4a9b6b..393236fba6 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py @@ -1,7 +1,21 @@ import collections + import qargparse -from openpype.pipeline import get_representation_context -from openpype.hosts.tvpaint.api import lib, pipeline, plugin + +from openpype.pipeline import ( + get_representation_context, + register_host, +) +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import ( + get_layers_data, + execute_george_through_file, +) +from openpype.hosts.tvpaint.api.pipeline import ( + write_workfile_metadata, + SECTION_NAME_CONTAINERS, + containerise, 
+)
 
 
 class LoadImage(plugin.Loader):
@@ -79,10 +93,10 @@ class LoadImage(plugin.Loader):
             load_options_str
         )
 
-        lib.execute_george_through_file(george_script)
+        execute_george_through_file(george_script)
 
         loaded_layer = None
-        layers = lib.layers_data()
+        layers = get_layers_data()
         for layer in layers:
             if layer["name"] == layer_name:
                 loaded_layer = layer
@@ -95,7 +109,7 @@ class LoadImage(plugin.Loader):
         layer_names = [loaded_layer["name"]]
 
         namespace = namespace or layer_name
-        return pipeline.containerise(
+        return containerise(
             name=name,
             namespace=namespace,
             members=layer_names,
@@ -109,7 +123,7 @@ class LoadImage(plugin.Loader):
             return
 
         if layers is None:
-            layers = lib.layers_data()
+            layers = get_layers_data()
 
         available_ids = set(layer["layer_id"] for layer in layers)
@@ -152,14 +166,15 @@ class LoadImage(plugin.Loader):
             line = "tv_layerkill {}".format(layer_id)
             george_script_lines.append(line)
         george_script = "\n".join(george_script_lines)
-        lib.execute_george_through_file(george_script)
+        execute_george_through_file(george_script)
 
     def _remove_container(self, container, members=None):
         if not container:
             return
         representation = container["representation"]
         members = self.get_members_from_container(container)
-        current_containers = pipeline.ls()
+        host = registered_host()
+        current_containers = host.get_containers()
         pop_idx = None
         for idx, cur_con in enumerate(current_containers):
             cur_members = self.get_members_from_container(cur_con)
@@ -179,8 +194,8 @@ class LoadImage(plugin.Loader):
             return
 
         current_containers.pop(pop_idx)
-        pipeline.write_workfile_metadata(
-            pipeline.SECTION_NAME_CONTAINERS, current_containers
+        write_workfile_metadata(
+            SECTION_NAME_CONTAINERS, current_containers
         )
 
     def remove(self, container):
@@ -214,7 +229,7 @@ class LoadImage(plugin.Loader):
                 break
 
         old_layers = []
-        layers = lib.layers_data()
+        layers = get_layers_data()
         previous_layer_ids = set(layer["layer_id"] for layer in layers)
         if old_layers_are_ids:
             for layer in layers:
@@ -263,7 +278,7 @@ class LoadImage(plugin.Loader):
         new_container = self.load(context, name, namespace, {})
         new_layer_names = self.get_members_from_container(new_container)
 
-        layers = lib.layers_data()
+        layers = get_layers_data()
 
         new_layers = []
         for layer in layers:
@@ -304,4 +319,4 @@ class LoadImage(plugin.Loader):
         # Execute george scripts if there are any
         if george_script_lines:
             george_script = "\n".join(george_script_lines)
-            lib.execute_george_through_file(george_script)
+            execute_george_through_file(george_script)
diff --git a/openpype/hosts/tvpaint/plugins/load/load_sound.py b/openpype/hosts/tvpaint/plugins/load/load_sound.py
index 3f42370f5c..f312db262a 100644
--- a/openpype/hosts/tvpaint/plugins/load/load_sound.py
+++ b/openpype/hosts/tvpaint/plugins/load/load_sound.py
@@ -1,6 +1,9 @@
 import os
 import tempfile
-from openpype.hosts.tvpaint.api import lib, plugin
+from openpype.hosts.tvpaint.api import plugin
+from openpype.hosts.tvpaint.api.lib import (
+    execute_george_through_file,
+)
 
 
 class ImportSound(plugin.Loader):
@@ -64,7 +67,7 @@ class ImportSound(plugin.Loader):
         )
         self.log.info("*** George script:\n{}\n***".format(george_script))
         # Execute geoge script
-        lib.execute_george_through_file(george_script)
+        execute_george_through_file(george_script)
 
         # Read output file
         lines = []
diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py
index a99b300730..fc7588f56e 100644
--- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py
+++ 
b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -11,7 +11,13 @@ from openpype.pipeline.workfile import ( get_last_workfile_with_version, ) from openpype.pipeline.template_data import get_template_data_with_names -from openpype.hosts.tvpaint.api import lib, pipeline, plugin +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import ( + execute_george_through_file, +) +from openpype.hosts.tvpaint.api.pipeline import ( + get_current_workfile_context, +) class LoadWorkfile(plugin.Loader): @@ -26,9 +32,9 @@ class LoadWorkfile(plugin.Loader): # Load context of current workfile as first thing # - which context and extension has host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() - context = pipeline.get_current_workfile_context() + context = get_current_workfile_context() filepath = self.fname.replace("\\", "/") @@ -40,7 +46,7 @@ class LoadWorkfile(plugin.Loader): george_script = "tv_LoadProject '\"'\"{}\"'\"'".format( filepath ) - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) # Save workfile. host_name = "tvpaint" @@ -69,12 +75,13 @@ class LoadWorkfile(plugin.Loader): file_template = anatomy.templates[template_key]["file"] # Define saving file extension + extensions = host.get_workfile_extensions() if current_file: # Match the extension of current file _, extension = os.path.splitext(current_file) else: # Fall back to the first extension supported for this host. - extension = host.file_extensions()[0] + extension = extensions[0] data["ext"] = extension @@ -83,7 +90,7 @@ class LoadWorkfile(plugin.Loader): folder_template, data ) version = get_last_workfile_with_version( - work_root, file_template, data, host.file_extensions() + work_root, file_template, data, extensions )[1] if version is None: @@ -97,4 +104,4 @@ class LoadWorkfile(plugin.Loader): file_template, data ) path = os.path.join(work_root, filename) - host.save_file(path) + host.save_workfile(path) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 9b6d5c4879..ae1326a5bd 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -3,8 +3,8 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollectInstances(pyblish.api.ContextPlugin): @@ -107,13 +107,14 @@ class CollectInstances(pyblish.api.ContextPlugin): # Use empty variant value variant = "" task_name = legacy_io.Session["AVALON_TASK"] - new_subset_name = get_subset_name_with_asset_doc( + new_subset_name = get_subset_name( family, variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) instance_data["subset"] = new_subset_name diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 20c5bb586a..92a2815ba0 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -3,7 +3,7 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import 
get_subset_name class CollectRenderScene(pyblish.api.ContextPlugin): @@ -75,14 +75,15 @@ class CollectRenderScene(pyblish.api.ContextPlugin): dynamic_data["render_pass"] = dynamic_data["renderpass"] task_name = workfile_context["task"] - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( "render", variant, task_name, asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance_data = { diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 88c5f4dbc7..8c7c8c3899 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -3,8 +3,8 @@ import json import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -39,13 +39,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # Use empty variant value variant = "" task_name = legacy_io.Session["AVALON_TASK"] - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) # Create Workfile instance diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index c59ef82f85..8fe71a4a46 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -5,7 +5,22 @@ import tempfile import pyblish.api from openpype.pipeline import legacy_io -from openpype.hosts.tvpaint.api import pipeline, lib +from openpype.hosts.tvpaint.api.lib import ( + execute_george, + execute_george_through_file, + get_layers_data, + get_groups_data, +) +from openpype.hosts.tvpaint.api.pipeline import ( + SECTION_NAME_CONTEXT, + SECTION_NAME_INSTANCES, + SECTION_NAME_CONTAINERS, + + get_workfile_metadata_string, + write_workfile_metadata, + get_current_workfile_context, + list_instances, +) class ResetTVPaintWorkfileMetadata(pyblish.api.Action): @@ -15,12 +30,12 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): def process(self, context, plugin): metadata_keys = { - pipeline.SECTION_NAME_CONTEXT: {}, - pipeline.SECTION_NAME_INSTANCES: [], - pipeline.SECTION_NAME_CONTAINERS: [] + SECTION_NAME_CONTEXT: {}, + SECTION_NAME_INSTANCES: [], + SECTION_NAME_CONTAINERS: [] } for metadata_key, default in metadata_keys.items(): - json_string = pipeline.get_workfile_metadata_string(metadata_key) + json_string = get_workfile_metadata_string(metadata_key) if not json_string: continue @@ -35,7 +50,7 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): ).format(metadata_key, default, json_string), exc_info=True ) - pipeline.write_workfile_metadata(metadata_key, default) + write_workfile_metadata(metadata_key, default) class CollectWorkfileData(pyblish.api.ContextPlugin): @@ -45,8 +60,8 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): actions = [ResetTVPaintWorkfileMetadata] def process(self, context): - current_project_id = lib.execute_george("tv_projectcurrentid") - lib.execute_george("tv_projectselect {}".format(current_project_id)) + current_project_id = 
execute_george("tv_projectcurrentid") + execute_george("tv_projectselect {}".format(current_project_id)) # Collect and store current context to have reference current_context = { @@ -60,7 +75,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect context from workfile metadata self.log.info("Collecting workfile context") - workfile_context = pipeline.get_current_workfile_context() + workfile_context = get_current_workfile_context() # Store workfile context to pyblish context context.data["workfile_context"] = workfile_context if workfile_context: @@ -96,7 +111,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect instances self.log.info("Collecting instance data from workfile") - instance_data = pipeline.list_instances() + instance_data = list_instances() context.data["workfileInstances"] = instance_data self.log.debug( "Instance data:\"{}".format(json.dumps(instance_data, indent=4)) @@ -104,7 +119,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect information about layers self.log.info("Collecting layers data from workfile") - layers_data = lib.layers_data() + layers_data = get_layers_data() layers_by_name = {} for layer in layers_data: layer_name = layer["name"] @@ -120,14 +135,14 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect information about groups self.log.info("Collecting groups data from workfile") - group_data = lib.groups_data() + group_data = get_groups_data() context.data["groupsData"] = group_data self.log.debug( "Group data:\"{}".format(json.dumps(group_data, indent=4)) ) self.log.info("Collecting scene data from workfile") - workfile_info_parts = lib.execute_george("tv_projectinfo").split(" ") + workfile_info_parts = execute_george("tv_projectinfo").split(" ") # Project frame start - not used workfile_info_parts.pop(-1) @@ -139,10 +154,10 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): workfile_path = " ".join(workfile_info_parts).replace("\"", "") # Marks return as "{frame - 1} {state} ", example "0 set". 
- result = lib.execute_george("tv_markin") + result = execute_george("tv_markin") mark_in_frame, mark_in_state, _ = result.split(" ") - result = lib.execute_george("tv_markout") + result = execute_george("tv_markout") mark_out_frame, mark_out_state, _ = result.split(" ") scene_data = { @@ -156,7 +171,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): "sceneMarkInState": mark_in_state == "set", "sceneMarkOut": int(mark_out_frame), "sceneMarkOutState": mark_out_state == "set", - "sceneStartFrame": int(lib.execute_george("tv_startframe")), + "sceneStartFrame": int(execute_george("tv_startframe")), "sceneBgColor": self._get_bg_color() } self.log.debug( @@ -188,7 +203,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): ] george_script = "\n".join(george_script_lines) - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) with open(output_filepath, "r") as stream: data = stream.read() diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 77712347bd..1ebaf1da64 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -5,7 +5,13 @@ import tempfile from PIL import Image import pyblish.api -from openpype.hosts.tvpaint.api import lib + +from openpype.hosts.tvpaint.api.lib import ( + execute_george, + execute_george_through_file, + get_layers_pre_post_behavior, + get_layers_exposure_frames, +) from openpype.hosts.tvpaint.lib import ( calculate_layers_extraction_data, get_frame_filename_template, @@ -61,7 +67,7 @@ class ExtractSequence(pyblish.api.Extractor): # different way when Start Frame is not `0` # NOTE It will be set back after rendering scene_start_frame = instance.context.data["sceneStartFrame"] - lib.execute_george("tv_startframe 0") + execute_george("tv_startframe 0") # Frame start/end may be stored as float frame_start = int(instance.data["frameStart"]) @@ -113,7 +119,7 @@ class ExtractSequence(pyblish.api.Extractor): output_filepaths_by_frame_idx, thumbnail_fullpath = result # Change scene frame Start back to previous value - lib.execute_george("tv_startframe {}".format(scene_start_frame)) + execute_george("tv_startframe {}".format(scene_start_frame)) # Sequence of one frame if not output_filepaths_by_frame_idx: @@ -241,7 +247,7 @@ class ExtractSequence(pyblish.api.Extractor): george_script_lines.append(" ".join(orig_color_command)) - lib.execute_george_through_file("\n".join(george_script_lines)) + execute_george_through_file("\n".join(george_script_lines)) first_frame_filepath = None output_filepaths_by_frame_idx = {} @@ -304,8 +310,8 @@ class ExtractSequence(pyblish.api.Extractor): return [], None self.log.debug("Collecting pre/post behavior of individual layers.") - behavior_by_layer_id = lib.get_layers_pre_post_behavior(layer_ids) - exposure_frames_by_layer_id = lib.get_layers_exposure_frames( + behavior_by_layer_id = get_layers_pre_post_behavior(layer_ids) + exposure_frames_by_layer_id = get_layers_exposure_frames( layer_ids, layers ) extraction_data_by_layer_id = calculate_layers_extraction_data( @@ -410,7 +416,7 @@ class ExtractSequence(pyblish.api.Extractor): ",".join(frames_to_render), layer_id, layer["name"] )) # Let TVPaint render layer's image - lib.execute_george_through_file("\n".join(george_script_lines)) + execute_george_through_file("\n".join(george_script_lines)) # Fill frames between `frame_start_index` and `frame_end_index` self.log.debug("Filling 
frames not rendered frames.") diff --git a/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py b/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py index 24d6558168..a85caf2557 100644 --- a/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py +++ b/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py @@ -1,7 +1,7 @@ import pyblish.api -from openpype.api import version_up -from openpype.hosts.tvpaint.api import workio +from openpype.lib import version_up +from openpype.pipeline import registered_host class IncrementWorkfileVersion(pyblish.api.ContextPlugin): @@ -17,6 +17,7 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin): assert all(result["success"] for result in context.data["results"]), ( "Publishing not successful so version is not increased.") + host = registered_host() path = context.data["currentFile"] - workio.save_file(version_up(path)) + host.save_workfile(version_up(path)) self.log.info('Incrementing workfile version') diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py b/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py index 70816f9f18..7e35726030 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py @@ -1,6 +1,9 @@ import pyblish.api from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import pipeline +from openpype.hosts.tvpaint.api.pipeline import ( + list_instances, + write_instances, +) class FixAssetNames(pyblish.api.Action): @@ -15,7 +18,7 @@ class FixAssetNames(pyblish.api.Action): def process(self, context, plugin): context_asset_name = context.data["asset"] - old_instance_items = pipeline.list_instances() + old_instance_items = list_instances() new_instance_items = [] for instance_item in old_instance_items: instance_asset_name = instance_item.get("asset") @@ -25,7 +28,7 @@ class FixAssetNames(pyblish.api.Action): ): instance_item["asset"] = context_asset_name new_instance_items.append(instance_item) - pipeline._write_instances(new_instance_items) + write_instances(new_instance_items) class ValidateAssetNames(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py index d1f299e006..12d50e17ff 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -2,7 +2,7 @@ import json import pyblish.api from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import lib +from openpype.hosts.tvpaint.api.lib import execute_george class ValidateMarksRepair(pyblish.api.Action): @@ -15,10 +15,10 @@ class ValidateMarksRepair(pyblish.api.Action): def process(self, context, plugin): expected_data = ValidateMarks.get_expected_data(context) - lib.execute_george( + execute_george( "tv_markin {} set".format(expected_data["markIn"]) ) - lib.execute_george( + execute_george( "tv_markout {} set".format(expected_data["markOut"]) ) diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py b/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py index ddc738c6ed..066e54c670 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py @@ -1,6 +1,6 @@ import pyblish.api from openpype.pipeline import PublishXmlValidationError -from 
openpype.hosts.tvpaint.api import lib +from openpype.hosts.tvpaint.api.lib import execute_george class RepairStartFrame(pyblish.api.Action): @@ -11,7 +11,7 @@ class RepairStartFrame(pyblish.api.Action): on = "failed" def process(self, context, plugin): - lib.execute_george("tv_startframe 0") + execute_george("tv_startframe 0") class ValidateStartFrame(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class ValidateStartFrame(pyblish.api.ContextPlugin): optional = True def process(self, context): - start_frame = lib.execute_george("tv_startframe") + start_frame = execute_george("tv_startframe") if start_frame == 0: return diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py b/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py index eac345f395..d66ae50c60 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py @@ -1,6 +1,5 @@ import pyblish.api -from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import save_file +from openpype.pipeline import PublishXmlValidationError, registered_host class ValidateWorkfileMetadataRepair(pyblish.api.Action): @@ -13,8 +12,9 @@ class ValidateWorkfileMetadataRepair(pyblish.api.Action): def process(self, context, _plugin): """Save current workfile which should trigger storing of metadata.""" current_file = context.data["currentFile"] + host = registered_host() # Save file should trigger - save_file(current_file) + host.save_workfile(current_file) class ValidateWorkfileMetadata(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 1c785ab2ee..95c0a678bc 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -9,7 +9,7 @@ from abc import ABCMeta, abstractmethod, abstractproperty import six -from openpype.api import PypeLogger +from openpype.lib import Logger from openpype.modules import ModulesManager @@ -328,7 +328,7 @@ class TVPaintCommands: def log(self): """Access to logger object.""" if self._log is None: - self._log = PypeLogger.get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log @property diff --git a/openpype/hosts/unreal/__init__.py b/openpype/hosts/unreal/__init__.py index 41222f4f94..42dd8f0ac4 100644 --- a/openpype/hosts/unreal/__init__.py +++ b/openpype/hosts/unreal/__init__.py @@ -1,6 +1,6 @@ -from .module import UnrealModule +from .addon import UnrealAddon __all__ = ( - "UnrealModule", + "UnrealAddon", ) diff --git a/openpype/hosts/unreal/module.py b/openpype/hosts/unreal/addon.py similarity index 88% rename from openpype/hosts/unreal/module.py rename to openpype/hosts/unreal/addon.py index aa08c8c130..16736214c5 100644 --- a/openpype/hosts/unreal/module.py +++ b/openpype/hosts/unreal/addon.py @@ -1,18 +1,18 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class UnrealModule(OpenPypeModule, IHostModule): +class UnrealAddon(OpenPypeModule, IHostAddon): name = "unreal" host_name = "unreal" def initialize(self, module_settings): self.enabled = True - def add_implementation_envs(self, env, app) -> None: + def add_implementation_envs(self, env, app): """Modify environments to contain all required for 
implementation.""" # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation diff --git a/openpype/hosts/unreal/api/__init__.py b/openpype/hosts/unreal/api/__init__.py index 870982f5f9..3f96d8ac6f 100644 --- a/openpype/hosts/unreal/api/__init__.py +++ b/openpype/hosts/unreal/api/__init__.py @@ -1,10 +1,8 @@ # -*- coding: utf-8 -*- """Unreal Editor OpenPype host API.""" -from .plugin import ( - Loader, - Creator -) +from .plugin import Loader + from .pipeline import ( install, uninstall, @@ -25,7 +23,6 @@ from .pipeline import ( __all__ = [ "install", "uninstall", - "Creator", "Loader", "ls", "publish", diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index d8d2f2420d..6fc00cb71c 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -1,16 +1,7 @@ # -*- coding: utf-8 -*- from abc import ABC -from openpype.pipeline import ( - LegacyCreator, - LoaderPlugin, -) - - -class Creator(LegacyCreator): - """This serves as skeleton for future OpenPype specific functionality""" - defaults = ['Main'] - maintain_selection = False +from openpype.pipeline import LoaderPlugin class Loader(LoaderPlugin, ABC): diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index 50b34bd573..4ae72593e9 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -8,8 +8,8 @@ from openpype.lib import ( PreLaunchHook, ApplicationLaunchFailed, ApplicationNotFound, - get_workfile_template_key ) +from openpype.pipeline.workfile import get_workfile_template_key import openpype.hosts.unreal.lib as unreal_lib diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp index 4a53af26b5..49e805da4d 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp @@ -1,5 +1,5 @@ -#include "OpenPype.h" #include "OpenPypeStyle.h" +#include "OpenPype.h" #include "Framework/Application/SlateApplication.h" #include "Styling/SlateStyleRegistry.h" #include "Slate/SlateGameResources.h" diff --git a/openpype/hosts/unreal/lib.py b/openpype/hosts/unreal/lib.py index 8c453b38b9..d02c6de357 100644 --- a/openpype/hosts/unreal/lib.py +++ b/openpype/hosts/unreal/lib.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Unreal launching and project tools.""" -import sys + import os import platform import json @@ -9,7 +9,7 @@ import subprocess import re from pathlib import Path from collections import OrderedDict -from openpype.api import get_project_settings +from openpype.settings import get_project_settings def get_engine_versions(env=None): diff --git a/openpype/hosts/unreal/plugins/create/create_camera.py b/openpype/hosts/unreal/plugins/create/create_camera.py index 2842900834..bf1489d688 100644 --- a/openpype/hosts/unreal/plugins/create/create_camera.py +++ b/openpype/hosts/unreal/plugins/create/create_camera.py @@ -2,11 +2,11 @@ import unreal from unreal import EditorAssetLibrary as eal from unreal import EditorLevelLibrary as ell -from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api.pipeline import instantiate +from openpype.pipeline import LegacyCreator -class CreateCamera(plugin.Creator): +class CreateCamera(LegacyCreator): """Layout output for character rigs""" 
name = "layoutMain" diff --git a/openpype/hosts/unreal/plugins/create/create_layout.py b/openpype/hosts/unreal/plugins/create/create_layout.py index 5fef08ce2a..c1067b00d9 100644 --- a/openpype/hosts/unreal/plugins/create/create_layout.py +++ b/openpype/hosts/unreal/plugins/create/create_layout.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- from unreal import EditorLevelLibrary -from openpype.hosts.unreal.api import plugin +from openpype.pipeline import LegacyCreator from openpype.hosts.unreal.api.pipeline import instantiate -class CreateLayout(plugin.Creator): +class CreateLayout(LegacyCreator): """Layout output for character rigs.""" name = "layoutMain" diff --git a/openpype/hosts/unreal/plugins/create/create_look.py b/openpype/hosts/unreal/plugins/create/create_look.py index 12f6b70ae6..4abf3f6095 100644 --- a/openpype/hosts/unreal/plugins/create/create_look.py +++ b/openpype/hosts/unreal/plugins/create/create_look.py @@ -2,9 +2,10 @@ """Create look in Unreal.""" import unreal # noqa from openpype.hosts.unreal.api import pipeline, plugin +from openpype.pipeline import LegacyCreator -class CreateLook(plugin.Creator): +class CreateLook(LegacyCreator): """Shader connections defining shape look.""" name = "unrealLook" diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 950799cc10..a85d17421b 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,10 +1,10 @@ import unreal from openpype.hosts.unreal.api import pipeline -from openpype.hosts.unreal.api.plugin import Creator +from openpype.pipeline import LegacyCreator -class CreateRender(Creator): +class CreateRender(LegacyCreator): """Create instance for sequence for rendering""" name = "unrealRender" diff --git a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py index 601c2fae06..45d517d27d 100644 --- a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- """Create Static Meshes as FBX geometry.""" import unreal # noqa -from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api.pipeline import ( instantiate, ) +from openpype.pipeline import LegacyCreator -class CreateStaticMeshFBX(plugin.Creator): +class CreateStaticMeshFBX(LegacyCreator): """Static FBX geometry.""" name = "unrealStaticMeshMain" diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 50e498dbb0..a5b9cbd1fc 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -20,15 +20,11 @@ class StaticMeshAlembicLoader(plugin.Loader): icon = "cube" color = "orange" - def get_task(self, filename, asset_dir, asset_name, replace): + @staticmethod + def get_task(filename, asset_dir, asset_name, replace, default_conversion): task = unreal.AssetImportTask() options = unreal.AbcImportSettings() sm_settings = unreal.AbcStaticMeshSettings() - conversion_settings = unreal.AbcConversionSettings( - preset=unreal.AbcConversionPreset.CUSTOM, - flip_u=False, flip_v=False, - rotation=[0.0, 0.0, 0.0], - scale=[1.0, 1.0, 1.0]) task.set_editor_property('filename', filename) task.set_editor_property('destination_path', asset_dir) @@ -44,13 +40,20 @@ class 
StaticMeshAlembicLoader(plugin.Loader):
         sm_settings.set_editor_property('merge_meshes', True)
 
+        if not default_conversion:
+            conversion_settings = unreal.AbcConversionSettings(
+                preset=unreal.AbcConversionPreset.CUSTOM,
+                flip_u=False, flip_v=False,
+                rotation=[0.0, 0.0, 0.0],
+                scale=[1.0, 1.0, 1.0])
+            options.conversion_settings = conversion_settings
+
         options.static_mesh_settings = sm_settings
-        options.conversion_settings = conversion_settings
         task.options = options
 
         return task
 
-    def load(self, context, name, namespace, data):
+    def load(self, context, name, namespace, options):
         """Load and containerise representation into Content Browser.
 
         This is two step process. First, import FBX to temporary path and
@@ -82,6 +85,10 @@ class StaticMeshAlembicLoader(plugin.Loader):
         asset_name = "{}".format(name)
         version = context.get('version').get('name')
 
+        default_conversion = False
+        if options.get("default_conversion"):
+            default_conversion = options.get("default_conversion")
+
         tools = unreal.AssetToolsHelpers().get_asset_tools()
         asset_dir, container_name = tools.create_unique_asset_name(
             f"{root}/{asset}/{name}_v{version:03d}", suffix="")
@@ -91,7 +98,8 @@ class StaticMeshAlembicLoader(plugin.Loader):
         if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
             unreal.EditorAssetLibrary.make_directory(asset_dir)
 
-        task = self.get_task(self.fname, asset_dir, asset_name, False)
+        task = self.get_task(
+            self.fname, asset_dir, asset_name, False, default_conversion)
 
         unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])  # noqa: E501
diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py
index 926c932a85..c1d66ddf2a 100644
--- a/openpype/hosts/unreal/plugins/load/load_layout.py
+++ b/openpype/hosts/unreal/plugins/load/load_layout.py
@@ -24,7 +24,7 @@ from openpype.pipeline import (
     legacy_io,
 )
 from openpype.pipeline.context_tools import get_current_project_asset
-from openpype.api import get_current_project_settings
+from openpype.settings import get_current_project_settings
 from openpype.hosts.unreal.api import plugin
 from openpype.hosts.unreal.api import pipeline as unreal_pipeline
diff --git a/openpype/hosts/unreal/plugins/load/load_layout_existing.py b/openpype/hosts/unreal/plugins/load/load_layout_existing.py
new file mode 100644
index 0000000000..3ce99f8ef6
--- /dev/null
+++ b/openpype/hosts/unreal/plugins/load/load_layout_existing.py
@@ -0,0 +1,418 @@
+import json
+from pathlib import Path
+
+import unreal
+from unreal import EditorLevelLibrary
+
+from bson.objectid import ObjectId
+
+from openpype import pipeline
+from openpype.pipeline import (
+    discover_loader_plugins,
+    loaders_from_representation,
+    load_container,
+    get_representation_path,
+    AVALON_CONTAINER_ID,
+    legacy_io,
+)
+from openpype.settings import get_current_project_settings
+from openpype.hosts.unreal.api import plugin
+from openpype.hosts.unreal.api import pipeline as upipeline
+
+
+class ExistingLayoutLoader(plugin.Loader):
+    """
+    Load Layout for an existing scene, and match the existing assets.
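+
+    The layout JSON provides, per element, a representation id, version,
+    family, instance name and basis/transform matrices. Existing
+    StaticMeshActors in the level are matched by the source filename
+    stored in their import data; unmatched actors may be deleted
+    depending on the "delete_unmatched_assets" project setting.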
+ """ + + families = ["layout"] + representations = ["json"] + + label = "Load Layout on Existing Scene" + icon = "code-fork" + color = "orange" + ASSET_ROOT = "/Game/OpenPype" + + @staticmethod + def _create_container( + asset_name, asset_dir, asset, representation, parent, family + ): + container_name = f"{asset_name}_CON" + + container = None + if not unreal.EditorAssetLibrary.does_asset_exist( + f"{asset_dir}/{container_name}" + ): + container = upipeline.create_container(container_name, asset_dir) + else: + ar = unreal.AssetRegistryHelpers.get_asset_registry() + obj = ar.get_asset_by_object_path( + f"{asset_dir}/{container_name}.{container_name}") + container = obj.get_asset() + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + # "loader": str(self.__class__.__name__), + "representation": representation, + "parent": parent, + "family": family + } + + upipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + return container.get_path_name() + + @staticmethod + def _get_current_level(): + ue_version = unreal.SystemLibrary.get_engine_version().split('.') + ue_major = ue_version[0] + + if ue_major == '4': + return EditorLevelLibrary.get_editor_world() + elif ue_major == '5': + return unreal.LevelEditorSubsystem().get_current_level() + + raise NotImplementedError( + f"Unreal version {ue_major} not supported") + + def _get_transform(self, ext, import_data, lasset): + conversion = unreal.Matrix.IDENTITY.transform() + fbx_tuning = unreal.Matrix.IDENTITY.transform() + + basis = unreal.Matrix( + lasset.get('basis')[0], + lasset.get('basis')[1], + lasset.get('basis')[2], + lasset.get('basis')[3] + ).transform() + transform = unreal.Matrix( + lasset.get('transform_matrix')[0], + lasset.get('transform_matrix')[1], + lasset.get('transform_matrix')[2], + lasset.get('transform_matrix')[3] + ).transform() + + # Check for the conversion settings. We cannot access + # the alembic conversion settings, so we assume that + # the maya ones have been applied. + if ext == '.fbx': + loc = import_data.import_translation + rot = import_data.import_rotation.to_vector() + scale = import_data.import_uniform_scale + conversion = unreal.Transform( + location=[loc.x, loc.y, loc.z], + rotation=[rot.x, rot.y, rot.z], + scale=[-scale, scale, scale] + ) + fbx_tuning = unreal.Transform( + rotation=[180.0, 0.0, 90.0], + scale=[1.0, 1.0, 1.0] + ) + elif ext == '.abc': + # This is the standard conversion settings for + # alembic files from Maya. 
+ conversion = unreal.Transform( + location=[0.0, 0.0, 0.0], + rotation=[0.0, 0.0, 0.0], + scale=[1.0, -1.0, 1.0] + ) + + new_transform = (basis.inverse() * transform * basis) + return fbx_tuning * conversion.inverse() * new_transform + + def _spawn_actor(self, obj, lasset): + actor = EditorLevelLibrary.spawn_actor_from_object( + obj, unreal.Vector(0.0, 0.0, 0.0) + ) + + actor.set_actor_label(lasset.get('instance_name')) + smc = actor.get_editor_property('static_mesh_component') + mesh = smc.get_editor_property('static_mesh') + import_data = mesh.get_editor_property('asset_import_data') + filename = import_data.get_first_filename() + path = Path(filename) + + transform = self._get_transform( + path.suffix, import_data, lasset) + + actor.set_actor_transform(transform, False, True) + + @staticmethod + def _get_fbx_loader(loaders, family): + name = "" + if family == 'rig': + name = "SkeletalMeshFBXLoader" + elif family == 'model' or family == 'staticMesh': + name = "StaticMeshFBXLoader" + elif family == 'camera': + name = "CameraLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + @staticmethod + def _get_abc_loader(loaders, family): + name = "" + if family == 'rig': + name = "SkeletalMeshAlembicLoader" + elif family == 'model': + name = "StaticMeshAlembicLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + def _load_asset(self, representation, version, instance_name, family): + valid_formats = ['fbx', 'abc'] + + repr_data = legacy_io.find_one({ + "type": "representation", + "parent": ObjectId(version), + "name": {"$in": valid_formats} + }) + repr_format = repr_data.get('name') + + all_loaders = discover_loader_plugins() + loaders = loaders_from_representation( + all_loaders, representation) + + loader = None + + if repr_format == 'fbx': + loader = self._get_fbx_loader(loaders, family) + elif repr_format == 'abc': + loader = self._get_abc_loader(loaders, family) + + if not loader: + self.log.error(f"No valid loader found for {representation}") + return [] + + # This option is necessary to avoid importing the assets with a + # different conversion compared to the other assets. For ABC files, + # it is in fact impossible to access the conversion settings. So, + # we must assume that the Maya conversion settings have been applied. + options = { + "default_conversion": True + } + + assets = load_container( + loader, + representation, + namespace=instance_name, + options=options + ) + + return assets + + def _process(self, lib_path): + data = get_current_project_settings() + delete_unmatched = data["unreal"]["delete_unmatched_assets"] + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + actors = EditorLevelLibrary.get_all_level_actors() + + with open(lib_path, "r") as fp: + data = json.load(fp) + + layout_data = [] + + # Get all the representations in the JSON from the database. 
+ for element in data: + if element.get('representation'): + layout_data.append(( + pipeline.legacy_io.find_one({ + "_id": ObjectId(element.get('representation')) + }), + element + )) + + containers = [] + actors_matched = [] + + for (repr_data, lasset) in layout_data: + if not repr_data: + raise AssertionError("Representation not found") + if not (repr_data.get('data') or + repr_data.get('data').get('path')): + raise AssertionError("Representation does not have path") + if not repr_data.get('context'): + raise AssertionError("Representation does not have context") + + # For every actor in the scene, check if it has a representation in + # those we got from the JSON. If so, create a container for it. + # Otherwise, remove it from the scene. + found = False + + for actor in actors: + if not actor.get_class().get_name() == 'StaticMeshActor': + continue + if actor in actors_matched: + continue + + # Get the original path of the file from which the asset has + # been imported. + smc = actor.get_editor_property('static_mesh_component') + mesh = smc.get_editor_property('static_mesh') + import_data = mesh.get_editor_property('asset_import_data') + filename = import_data.get_first_filename() + path = Path(filename) + + if (not path.name or + path.name not in repr_data.get('data').get('path')): + continue + + actor.set_actor_label(lasset.get('instance_name')) + + mesh_path = Path(mesh.get_path_name()).parent.as_posix() + + # Create the container for the asset. + asset = repr_data.get('context').get('asset') + subset = repr_data.get('context').get('subset') + container = self._create_container( + f"{asset}_{subset}", mesh_path, asset, + repr_data.get('_id'), repr_data.get('parent'), + repr_data.get('context').get('family') + ) + containers.append(container) + + # Set the transform for the actor. + transform = self._get_transform( + path.suffix, import_data, lasset) + actor.set_actor_transform(transform, False, True) + + actors_matched.append(actor) + found = True + break + + # If an actor has not been found for this representation, + # we check if it has been loaded already by checking all the + # loaded containers. If so, we add it to the scene. Otherwise, + # we load it. + if found: + continue + + all_containers = upipeline.ls() + + loaded = False + + for container in all_containers: + repr = container.get('representation') + + if not repr == str(repr_data.get('_id')): + continue + + asset_dir = container.get('namespace') + + filter = unreal.ARFilter( + class_names=["StaticMesh"], + package_paths=[asset_dir], + recursive_paths=False) + assets = ar.get_assets(filter) + + for asset in assets: + obj = asset.get_asset() + self._spawn_actor(obj, lasset) + + loaded = True + break + + # If the asset has not been loaded yet, we load it. + if loaded: + continue + + assets = self._load_asset( + lasset.get('representation'), + lasset.get('version'), + lasset.get('instance_name'), + lasset.get('family') + ) + + for asset in assets: + obj = ar.get_asset_by_object_path(asset).get_asset() + if not obj.get_class().get_name() == 'StaticMesh': + continue + self._spawn_actor(obj, lasset) + + break + + # Check if an actor was not matched to a representation. + # If so, remove it from the scene. 
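+        # Note: deletion is gated by the 'delete_unmatched_assets' project
+        # setting read at the top of this method; when it is disabled the
+        # unmatched actor is only logged as a warning.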
+ for actor in actors: + if not actor.get_class().get_name() == 'StaticMeshActor': + continue + if actor not in actors_matched: + self.log.warning(f"Actor {actor.get_name()} not matched.") + if delete_unmatched: + EditorLevelLibrary.destroy_actor(actor) + + return containers + + def load(self, context, name, namespace, options): + print("Loading Layout and Match Assets") + + asset = context.get('asset').get('name') + asset_name = f"{asset}_{name}" if asset else name + container_name = f"{asset}_{name}_CON" + + curr_level = self._get_current_level() + + if not curr_level: + raise AssertionError("Current level not saved") + + containers = self._process(self.fname) + + curr_level_path = Path( + curr_level.get_outer().get_path_name()).parent.as_posix() + + if not unreal.EditorAssetLibrary.does_asset_exist( + f"{curr_level_path}/{container_name}" + ): + upipeline.create_container( + container=container_name, path=curr_level_path) + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": curr_level_path, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"], + "loaded_assets": containers + } + upipeline.imprint(f"{curr_level_path}/{container_name}", data) + + def update(self, container, representation): + asset_dir = container.get('namespace') + + source_path = get_representation_path(representation) + containers = self._process(source_path) + + data = { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]), + "loaded_assets": containers + } + upipeline.imprint( + "{}/{}".format(asset_dir, container.get('container_name')), data) diff --git a/openpype/hosts/unreal/plugins/publish/extract_camera.py b/openpype/hosts/unreal/plugins/publish/extract_camera.py index ce53824563..4e37cc6a86 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_camera.py +++ b/openpype/hosts/unreal/plugins/publish/extract_camera.py @@ -6,10 +6,10 @@ import unreal from unreal import EditorAssetLibrary as eal from unreal import EditorLevelLibrary as ell -import openpype.api +from openpype.pipeline import publish -class ExtractCamera(openpype.api.Extractor): +class ExtractCamera(publish.Extractor): """Extract a camera.""" label = "Extract Camera" diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index 8924df36a7..cac7991f00 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -3,18 +3,15 @@ import os import json import math -from bson.objectid import ObjectId - import unreal from unreal import EditorLevelLibrary as ell from unreal import EditorAssetLibrary as eal from openpype.client import get_representation_by_name -import openpype.api -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, publish -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" diff --git a/openpype/hosts/unreal/plugins/publish/extract_look.py b/openpype/hosts/unreal/plugins/publish/extract_look.py index ea39949417..f999ad8651 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_look.py +++ b/openpype/hosts/unreal/plugins/publish/extract_look.py @@ -5,10 +5,10 @@ import os 
import unreal from unreal import MaterialEditingLibrary as mat_lib -import openpype.api +from openpype.pipeline import publish -class ExtractLook(openpype.api.Extractor): +class ExtractLook(publish.Extractor): """Extract look.""" label = "Extract Look" diff --git a/openpype/hosts/unreal/plugins/publish/extract_render.py b/openpype/hosts/unreal/plugins/publish/extract_render.py index 37fe7e916f..8ff38fbee0 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_render.py +++ b/openpype/hosts/unreal/plugins/publish/extract_render.py @@ -2,10 +2,10 @@ from pathlib import Path import unreal -import openpype.api +from openpype.pipeline import publish -class ExtractRender(openpype.api.Extractor): +class ExtractRender(publish.Extractor): """Extract render.""" label = "Extract Render" diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py index e69de29bb2..4e918c5d7d 100644 --- a/openpype/hosts/webpublisher/__init__.py +++ b/openpype/hosts/webpublisher/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + WebpublisherAddon, + WEBPUBLISHER_ROOT_DIR, +) + + +__all__ = ( + "WebpublisherAddon", + "WEBPUBLISHER_ROOT_DIR", +) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py new file mode 100644 index 0000000000..a64d74e62b --- /dev/null +++ b/openpype/hosts/webpublisher/addon.py @@ -0,0 +1,106 @@ +import os + +import click + +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostAddon + +WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class WebpublisherAddon(OpenPypeModule, IHostAddon): + name = "webpublisher" + host_name = "webpublisher" + + def initialize(self, module_settings): + self.enabled = True + + def headless_publish(self, log, close_plugin_name=None, is_test=False): + """Runs publish in a opened host with a context. + + Close Python process at the end. + """ + + from openpype.pipeline.publish.lib import remote_publish + from .lib import get_webpublish_conn, publish_and_log + + if is_test: + remote_publish(log, close_plugin_name) + return + + dbcon = get_webpublish_conn() + _id = os.environ.get("BATCH_LOG_ID") + if not _id: + log.warning("Unable to store log records, " + "batch will be unfinished!") + return + + publish_and_log( + dbcon, _id, log, close_plugin_name=close_plugin_name + ) + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group( + WebpublisherAddon.name, + help="Webpublisher related commands.") +def cli_main(): + pass + + +@cli_main.command() +@click.argument("path") +@click.option("-u", "--user", help="User email address") +@click.option("-p", "--project", help="Project") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def publish(project, path, user=None, targets=None): + """Start publishing (Inner command). + + Publish collects json from paths provided as an argument. + More than one path is allowed. + """ + + from .publish_functions import cli_publish + + cli_publish(project, path, user, targets) + + +@cli_main.command() +@click.argument("path") +@click.option("-p", "--project", help="Project") +@click.option("-h", "--host", help="Host") +@click.option("-u", "--user", help="User email address") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def publishfromapp(project, path, host, user=None, targets=None): + """Start publishing through application (Inner command). + + Publish collects json from paths provided as an argument. 
+    More than one path is allowed.
+    """
+
+    from .publish_functions import cli_publish_from_app
+
+    cli_publish_from_app(project, path, host, user, targets)
+
+
+@cli_main.command()
+@click.option("-e", "--executable", help="Executable")
+@click.option("-u", "--upload_dir", help="Upload dir")
+@click.option("-h", "--host", help="Host", default=None)
+@click.option("-p", "--port", help="Port", default=None)
+def webserver(executable, upload_dir, host=None, port=None):
+    """Start service for communication with Webpublish Front end.
+
+    OP must be configured on a machine, e.g. OPENPYPE_MONGO filled AND
+    FTRACK_BOT_API_KEY provided with api key from Ftrack.
+
+    Expects "pype.club" user created on Ftrack.
+    """
+
+    from .webserver_service import run_webserver
+
+    run_webserver(executable, upload_dir, host, port)
diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py
index 18e3a16cf5..afea838e2c 100644
--- a/openpype/hosts/webpublisher/api/__init__.py
+++ b/openpype/hosts/webpublisher/api/__init__.py
@@ -1,31 +1,23 @@
 import os
 import logging

-from pyblish import api as pyblish
-import openpype.hosts.webpublisher
-from openpype.pipeline import legacy_io
+import pyblish.api
+
+from openpype.host import HostBase
+from openpype.hosts.webpublisher import WEBPUBLISHER_ROOT_DIR

 log = logging.getLogger("openpype.hosts.webpublisher")

-HOST_DIR = os.path.dirname(os.path.abspath(
-    openpype.hosts.webpublisher.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")

+class WebpublisherHost(HostBase):
+    name = "webpublisher"

-def install():
-    print("Installing Pype config...")
+    def install(self):
+        print("Installing Pype config...")
+        pyblish.api.register_host(self.name)

-    pyblish.register_plugin_path(PUBLISH_PATH)
-    log.info(PUBLISH_PATH)
-
-    legacy_io.install()
-
-
-def uninstall():
-    pyblish.deregister_plugin_path(PUBLISH_PATH)
-
-
-# to have required methods for interface
-def ls():
-    pass
+        publish_plugin_dir = os.path.join(
+            WEBPUBLISHER_ROOT_DIR, "plugins", "publish"
+        )
+        pyblish.api.register_plugin_path(publish_plugin_dir)
+        self.log.info(publish_plugin_dir)
diff --git a/openpype/lib/remote_publish.py b/openpype/hosts/webpublisher/lib.py
similarity index 75%
rename from openpype/lib/remote_publish.py
rename to openpype/hosts/webpublisher/lib.py
index b4b05c053b..4bc3f1db80 100644
--- a/openpype/lib/remote_publish.py
+++ b/openpype/hosts/webpublisher/lib.py
@@ -1,6 +1,7 @@
 import os
 from datetime import datetime
 import collections
+import json

 from bson.objectid import ObjectId

@@ -8,9 +9,10 @@ import pyblish.util
 import pyblish.api

 from openpype.client.mongo import OpenPypeMongoConnection
-from openpype.lib.plugin_tools import parse_json
+from openpype.settings import get_project_settings
+from openpype.lib import Logger
 from openpype.lib.profiles_filtering import filter_profiles
-from openpype.api import get_project_settings
+from openpype.pipeline.publish.lib import find_close_plugin

 ERROR_STATUS = "error"
 IN_PROGRESS_STATUS = "in_progress"
@@ -19,21 +21,51 @@ SENT_REPROCESSING_STATUS = "sent_for_reprocessing"
 FINISHED_REPROCESS_STATUS = "republishing_finished"
 FINISHED_OK_STATUS = "finished_ok"

+log = Logger.get_logger(__name__)

-def headless_publish(log, close_plugin_name=None, is_test=False):
-    """Runs publish in a opened host with a context and closes Python process.
+
+def parse_json(path):
+    """Parses json file at 'path' location.
+
+    Returns:
+        (dict) or None if unparsable
+    Raises:
+        AssertionError if 'path' doesn't exist
     """
-    if not is_test:
-        dbcon = get_webpublish_conn()
-        _id = os.environ.get("BATCH_LOG_ID")
-        if not _id:
-            log.warning("Unable to store log records, "
-                        "batch will be unfinished!")
-            return
+    path = path.strip('\"')
+    assert os.path.isfile(path), (
+        "Path to json file doesn't exist. \"{}\"".format(path)
+    )
+    data = None
+    with open(path, "r") as json_file:
+        try:
+            data = json.load(json_file)
+        except Exception as exc:
+            log.error(
+                "Error loading json: {} - Exception: {}".format(path, exc)
+            )
+    return data
-    publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name)
+
+def get_batch_asset_task_info(ctx):
+    """Parses context data from webpublisher's batch metadata.
+
+    Returns:
+        (tuple): asset, task_name (Optional), task_type
+    """
+    task_type = "default_task_type"
+    task_name = None
+    asset = None
+
+    if ctx["type"] == "task":
+        items = ctx["path"].split('/')
+        asset = items[-2]
+        task_name = ctx["name"]
+        task_type = ctx["attributes"]["type"]
     else:
-        publish(log, close_plugin_name)
+        asset = ctx["name"]
+
+    return asset, task_name, task_type


 def get_webpublish_conn():
@@ -62,43 +94,13 @@ def start_webpublish_log(dbcon, batch_id, user):
     }).inserted_id


-def publish(log, close_plugin_name=None, raise_error=False):
-    """Loops through all plugins, logs to console. Used for tests.
-
-    Args:
-        log (OpenPypeLogger)
-        close_plugin_name (str): name of plugin with responsibility to
-            close host app
-    """
-    # Error exit as soon as any error occurs.
-    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
-
-    close_plugin = _get_close_plugin(close_plugin_name, log)
-
-    for result in pyblish.util.publish_iter():
-        for record in result["records"]:
-            log.info("{}: {}".format(
-                result["plugin"].label, record.msg))
-
-        if result["error"]:
-            error_message = error_format.format(**result)
-            log.error(error_message)
-            if close_plugin:  # close host app explicitly after error
-                context = pyblish.api.Context()
-                close_plugin().process(context)
-            if raise_error:
-                # Fatal Error is because of Deadline
-                error_message = "Fatal Error: " + error_format.format(**result)
-                raise RuntimeError(error_message)
-
-
 def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
     """Loops through all plugins, logs ok and fails into OP DB.
Args: dbcon (OpenPypeMongoConnection) _id (str) - id of current job in DB - log (OpenPypeLogger) + log (openpype.lib.Logger) batch_id (str) - id sent from frontend close_plugin_name (str): name of plugin with responsibility to close host app @@ -107,7 +109,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n" error_format += "-" * 80 + "\n" - close_plugin = _get_close_plugin(close_plugin_name, log) + close_plugin = find_close_plugin(close_plugin_name, log) if isinstance(_id, str): _id = ObjectId(_id) @@ -226,16 +228,6 @@ def find_variant_key(application_manager, host): return found_variant_key -def _get_close_plugin(close_plugin_name, log): - if close_plugin_name: - plugins = pyblish.api.discover() - for plugin in plugins: - if plugin.__name__ == close_plugin_name: - return plugin - - log.debug("Close plugin not found, app might not close.") - - def get_task_data(batch_dir): """Return parsed data from first task manifest.json diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index 9ff779636a..eb2737b276 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -13,12 +13,13 @@ import os import pyblish.api -from openpype.lib.plugin_tools import ( - parse_json, - get_batch_asset_task_info -) -from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS from openpype.pipeline import legacy_io +from openpype_modules.webpublisher.lib import ( + parse_json, + get_batch_asset_task_info, + get_webpublish_conn, + IN_PROGRESS_STATUS +) class CollectBatchData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 20e277d794..dd4646f356 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -23,10 +23,8 @@ from openpype.lib import ( get_ffprobe_streams, convert_ffprobe_fps_value, ) -from openpype.lib.plugin_tools import ( - parse_json, - get_subset_name_with_asset_doc -) +from openpype.pipeline.create import get_subset_name +from openpype_modules.webpublisher.lib import parse_json class CollectPublishedFiles(pyblish.api.ContextPlugin): @@ -39,6 +37,15 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): This is not applicable for 'studio' processing where host application is called to process uploaded workfile and render frames itself. + + For each task configure what properties should resulting instance have + based on uploaded files: + - uploading sequence of 'png' >> create instance of 'render' family, + by adding 'review' to 'Families' and 'Create review' to Tags it will + produce review. + + There might be difference between single(>>image) and sequence(>>render) + uploaded files. 
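+
+    Example:
+        A 'png' sequence with a task profile configured as above could,
+        roughly, produce (an illustrative sketch only, the exact Settings
+        schema is not shown in this patch):
+
+            instance.data["family"] = "render"
+            instance.data["families"] = ["review"]
+            representation["tags"] = ["review"]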
""" # must be really early, context values are only in json file order = pyblish.api.CollectorOrder - 0.490 @@ -48,6 +55,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): # from Settings task_type_to_family = [] + sync_next_version = False # find max version to be published, use for all def process(self, context): batch_dir = context.data["batchDir"] @@ -66,6 +74,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_type = context.data["taskType"] project_name = context.data["project_name"] variant = context.data["variant"] + + next_versions = [] + instances = [] for task_dir in task_subfolders: task_data = parse_json(os.path.join(task_dir, "manifest.json")) @@ -80,18 +91,26 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): is_sequence, extension.replace(".", '')) - subset_name = get_subset_name_with_asset_doc( - family, variant, task_name, asset_doc, - project_name=project_name, host_name="webpublisher" + subset_name = get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name=project_name, + host_name="webpublisher", + project_settings=context.data["project_settings"] ) version = self._get_next_version( project_name, asset_doc, subset_name ) + next_versions.append(version) instance = context.create_instance(subset_name) instance.data["asset"] = asset_name instance.data["subset"] = subset_name + # set configurable result family instance.data["family"] = family + # set configurable additional families instance.data["families"] = families instance.data["version"] = version instance.data["stagingDir"] = tempfile.mkdtemp() @@ -134,8 +153,18 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["handleStart"] = asset_doc["data"]["handleStart"] instance.data["handleEnd"] = asset_doc["data"]["handleEnd"] + instances.append(instance) self.log.info("instance.data:: {}".format(instance.data)) + if not self.sync_next_version: + return + + # overwrite specific version with same version for all + max_next_version = max(next_versions) + for inst in instances: + inst.data["version"] = max_next_version + self.log.debug("overwritten version:: {}".format(max_next_version)) + def _get_subset_name(self, family, subset_template, task_name, variant): fill_pairs = { "variant": variant, @@ -173,7 +202,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "ext": ext[1:], "files": files, "stagingDir": task_dir, - "tags": tags + "tags": tags # configurable tags from Settings } self.log.info("sequences repre_data.data:: {}".format(repre_data)) return [repre_data] diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 92f581be5f..948e86c23e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -10,7 +10,7 @@ import re import copy import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectTVPaintInstances(pyblish.api.ContextPlugin): @@ -47,13 +47,14 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): new_instances = [] # Workfile instance - workfile_subset_name = get_subset_name_with_asset_doc( + workfile_subset_name = get_subset_name( self.workfile_family, self.workfile_variant, task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) workfile_instance = 
self._create_workfile_instance(
             context, workfile_subset_name
@@ -61,13 +62,14 @@
         new_instances.append(workfile_instance)

         # Review instance
-        review_subset_name = get_subset_name_with_asset_doc(
+        review_subset_name = get_subset_name(
             self.review_family,
             self.review_variant,
             task_name,
             asset_doc,
             project_name,
-            host_name
+            host_name,
+            project_settings=context.data["project_settings"]
         )
         review_instance = self._create_review_instance(
             context, review_subset_name
@@ -114,14 +116,15 @@
                 "family": "render"
             }

-            subset_name = get_subset_name_with_asset_doc(
+            subset_name = get_subset_name(
                 self.render_pass_family,
                 render_pass,
                 task_name,
                 asset_doc,
                 project_name,
                 host_name,
-                dynamic_data=dynamic_data
+                dynamic_data=dynamic_data,
+                project_settings=context.data["project_settings"]
             )

             instance = self._create_render_pass_instance(
@@ -137,14 +140,15 @@
                 # Override family for subset name
                 "family": "render"
             }
-            subset_name = get_subset_name_with_asset_doc(
+            subset_name = get_subset_name(
                 self.render_layer_family,
                 variant,
                 task_name,
                 asset_doc,
                 project_name,
                 host_name,
-                dynamic_data=dynamic_data
+                dynamic_data=dynamic_data,
+                project_settings=context.data["project_settings"]
             )
             instance = self._create_render_layer_instance(
                 context, layers, subset_name
diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py
index f0f29260a2..b5f8ed9c8f 100644
--- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py
+++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py
@@ -16,11 +16,11 @@ import uuid
 import json
 import shutil
 import pyblish.api
-from openpype.lib.plugin_tools import parse_json
 from openpype.hosts.tvpaint.worker import (
     SenderTVPaintCommands,
     CollectSceneData
 )
+from openpype_modules.webpublisher.lib import parse_json


 class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin):
diff --git a/openpype/hosts/webpublisher/publish_functions.py b/openpype/hosts/webpublisher/publish_functions.py
new file mode 100644
index 0000000000..83f53ced68
--- /dev/null
+++ b/openpype/hosts/webpublisher/publish_functions.py
@@ -0,0 +1,205 @@
+import os
+import time
+import pyblish.api
+import pyblish.util
+
+from openpype.lib import Logger
+from openpype.lib.applications import (
+    ApplicationManager,
+    get_app_environments_for_context,
+)
+from openpype.pipeline import install_host
+from openpype.hosts.webpublisher.api import WebpublisherHost
+
+from .lib import (
+    get_batch_asset_task_info,
+    get_webpublish_conn,
+    start_webpublish_log,
+    publish_and_log,
+    fail_batch,
+    find_variant_key,
+    get_task_data,
+    get_timeout,
+    IN_PROGRESS_STATUS
+)
+
+
+def cli_publish(project_name, batch_path, user_email, targets):
+    """Start headless publishing.
+
+    Used to publish rendered assets, workfiles etc. via Webpublisher.
+    Eventually should be yanked out to Webpublisher cli.
+
+    Publish uses json from passed paths argument.
+
+    Args:
+        project_name (str): project to publish (only single context is
+            expected per call of remotepublish)
+        batch_path (str): Path to batch folder. Contains subfolders with
+            resources (workfile, another subfolder 'renders' etc.)
+        user_email (str): email address for webpublisher - used to
+            find Ftrack user with same email
+        targets (list): Pyblish targets
+            (to choose validator for example)
+
+    Raises:
+        RuntimeError: When there is no path to process.
+    """
+
+    if not batch_path:
+        raise RuntimeError("No publish paths specified")
+
+    log = Logger.get_logger("remotepublish")
+    log.info("remotepublish command")
+
+    # Register target and host
+    webpublisher_host = WebpublisherHost()
+
+    os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
+    os.environ["AVALON_PROJECT"] = project_name
+    os.environ["AVALON_APP"] = webpublisher_host.name
+    os.environ["USER_EMAIL"] = user_email
+    os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib
+
+    if targets:
+        if isinstance(targets, str):
+            targets = [targets]
+        for target in targets:
+            pyblish.api.register_target(target)
+
+    install_host(webpublisher_host)
+
+    log.info("Running publish ...")
+
+    _, batch_id = os.path.split(batch_path)
+    dbcon = get_webpublish_conn()
+    _id = start_webpublish_log(dbcon, batch_id, user_email)
+
+    task_data = get_task_data(batch_path)
+    if not task_data["context"]:
+        msg = "Batch manifest must contain context data. "
+        msg += "Create new batch and set context properly."
+        fail_batch(_id, dbcon, msg)
+
+    publish_and_log(dbcon, _id, log, batch_id=batch_id)
+
+    log.info("Publish finished.")
+
+
+def cli_publish_from_app(
+    project_name, batch_path, host_name, user_email, targets
+):
+    """Opens installed variant of 'host' and runs remote publish there.
+
+    Eventually should be yanked out to Webpublisher cli.
+
+    Currently implemented and tested for Photoshop where customer
+    wants to process uploaded .psd file and publish collected layers
+    from there. Triggered by Webpublisher.
+
+    Checks if no other batches are running (status == 'in_progress'). If
+    so, it sleeps for SLEEP (this is separate process),
+    waits for WAIT_FOR seconds altogether.
+
+    Requires installed host application on the machine.
+
+    Runs publish process as user would, in automatic fashion.
+
+    Args:
+        project_name (str): project to publish (only single context is
+            expected per call of remotepublish)
+        batch_path (str): Path to batch folder. Contains subfolders with
+            resources (workfile, another subfolder 'renders' etc.)
+        host_name (str): 'photoshop'
+        user_email (str): email address for webpublisher - used to
+            find Ftrack user with same email
+        targets (list): Pyblish targets
+            (to choose validator for example)
+    """
+
+    log = Logger.get_logger("RemotePublishFromApp")
+
+    log.info("remotepublishphotoshop command")
+
+    task_data = get_task_data(batch_path)
+
+    workfile_path = os.path.join(batch_path,
+                                 task_data["task"],
+                                 task_data["files"][0])
+
+    print("workfile_path {}".format(workfile_path))
+
+    batch_id = task_data["batch"]
+    dbcon = get_webpublish_conn()
+    # safer to start logging here, launch might be broken altogether
+    _id = start_webpublish_log(dbcon, batch_id, user_email)
+
+    batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS}))
+    if len(batches_in_progress) > 1:
+        running_batches = [str(batch["_id"])
+                           for batch in batches_in_progress
+                           if batch["_id"] != _id]
+        msg = "There are still running batches {}\n". \
+            format("\n".join(running_batches))
+        msg += "Ask admin to check them and reprocess current batch"
+        fail_batch(_id, dbcon, msg)
+
+    if not task_data["context"]:
+        msg = "Batch manifest must contain context data. "
+        msg += "Create new batch and set context properly."
+ fail_batch(_id, dbcon, msg) + + asset_name, task_name, task_type = get_batch_asset_task_info( + task_data["context"]) + + application_manager = ApplicationManager() + found_variant_key = find_variant_key(application_manager, host_name) + app_name = "{}/{}".format(host_name, found_variant_key) + + # must have for proper launch of app + env = get_app_environments_for_context( + project_name, + asset_name, + task_name, + app_name + ) + print("env:: {}".format(env)) + os.environ.update(env) + + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + # must pass identifier to update log lines for a batch + os.environ["BATCH_LOG_ID"] = str(_id) + os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib + os.environ["USER_EMAIL"] = user_email + + pyblish.api.register_host(host_name) + if targets: + if isinstance(targets, str): + targets = [targets] + current_targets = os.environ.get("PYBLISH_TARGETS", "").split( + os.pathsep) + for target in targets: + current_targets.append(target) + + os.environ["PYBLISH_TARGETS"] = os.pathsep.join( + set(current_targets)) + + data = { + "last_workfile_path": workfile_path, + "start_last_workfile": True, + "project_name": project_name, + "asset_name": asset_name, + "task_name": task_name + } + + launched_app = application_manager.launch(app_name, **data) + + timeout = get_timeout(project_name, host_name, task_type) + + time_start = time.time() + while launched_app.poll() is None: + time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + msg = "Timeout reached" + fail_batch(_id, dbcon, msg) diff --git a/openpype/hosts/webpublisher/webserver_service/__init__.py b/openpype/hosts/webpublisher/webserver_service/__init__.py new file mode 100644 index 0000000000..73111d286e --- /dev/null +++ b/openpype/hosts/webpublisher/webserver_service/__init__.py @@ -0,0 +1,6 @@ +from .webserver import run_webserver + + +__all__ = ( + "run_webserver", +) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 6444a5191d..4039d2c8ec 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -10,20 +10,19 @@ from aiohttp.web_response import Response from openpype.client import ( get_projects, get_assets, - OpenPypeMongoConnection, ) -from openpype.lib import ( - PypeLogger, -) -from openpype.lib.remote_publish import ( +from openpype.lib import Logger +from openpype.settings import get_project_settings +from openpype_modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webpublisher import WebpublisherAddon +from openpype_modules.webpublisher.lib import ( + get_webpublish_conn, get_task_data, ERROR_STATUS, REPROCESS_STATUS ) -from openpype.settings import get_project_settings -from openpype_modules.webserver.base_routes import RestApiEndpoint -log = PypeLogger.get_logger("WebpublishRoutes") +log = Logger.get_logger("WebpublishRoutes") class ResourceRestApiEndpoint(RestApiEndpoint): @@ -79,9 +78,7 @@ class WebpublishRestApiResource(JsonApiResource): """Resource carrying OP DB connection for storing batch info into DB.""" def __init__(self): - mongo_client = OpenPypeMongoConnection.get_mongo_client() - database_name = os.environ["OPENPYPE_DATABASE_NAME"] - self.dbcon = mongo_client[database_name]["webpublishes"] + self.dbcon = get_webpublish_conn() class ProjectsEndpoint(ResourceRestApiEndpoint): @@ -217,7 +214,7 @@ class 
BatchPublishEndpoint(WebpublishApiEndpoint): # TVPaint filter { "extensions": [".tvpp"], - "command": "remotepublish", + "command": "publish", "arguments": { "targets": ["tvpaint_worker"] }, @@ -226,13 +223,13 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): # Photoshop filter { "extensions": [".psd", ".psb"], - "command": "remotepublishfromapp", + "command": "publishfromapp", "arguments": { - # Command 'remotepublishfromapp' requires --host argument + # Command 'publishfromapp' requires --host argument "host": "photoshop", # Make sure targets are set to None for cases that default # would change - # - targets argument is not used in 'remotepublishfromapp' + # - targets argument is not used in 'publishfromapp' "targets": ["remotepublish"] }, # does publish need to be handled by a queue, eg. only @@ -244,7 +241,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): batch_dir = os.path.join(self.resource.upload_dir, content["batch"]) # Default command and arguments - command = "remotepublish" + command = "publish" add_args = { # All commands need 'project' and 'user' "project": content["project_name"], @@ -275,6 +272,8 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): args = [ openpype_app, + "module", + WebpublisherAddon.name, command, batch_dir ] diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver.py similarity index 91% rename from openpype/hosts/webpublisher/webserver_service/webserver_cli.py rename to openpype/hosts/webpublisher/webserver_service/webserver.py index 6620e5d5cf..093b53d9d3 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver.py @@ -7,7 +7,14 @@ import json import subprocess from openpype.client import OpenPypeMongoConnection -from openpype.lib import PypeLogger +from openpype.modules import ModulesManager +from openpype.lib import Logger + +from openpype_modules.webpublisher.lib import ( + ERROR_STATUS, + REPROCESS_STATUS, + SENT_REPROCESSING_STATUS +) from .webpublish_routes import ( RestApiResource, @@ -21,32 +28,29 @@ from .webpublish_routes import ( TaskPublishEndpoint, UserReportEndpoint ) -from openpype.lib.remote_publish import ( - ERROR_STATUS, - REPROCESS_STATUS, - SENT_REPROCESSING_STATUS -) + +log = Logger.get_logger("webserver_gui") -log = PypeLogger.get_logger("webserver_gui") - - -def run_webserver(*args, **kwargs): +def run_webserver(executable, upload_dir, host=None, port=None): """Runs webserver in command line, adds routes.""" - from openpype.modules import ModulesManager + + if not host: + host = "localhost" + if not port: + port = 8079 manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] - host = kwargs.get("host") or "localhost" - port = kwargs.get("port") or 8079 + server_manager = webserver_module.create_new_server_manager(port, host) webserver_url = server_manager.url # queue for remotepublishfromapp tasks studio_task_queue = collections.deque() resource = RestApiResource(server_manager, - upload_dir=kwargs["upload_dir"], - executable=kwargs["executable"], + upload_dir=upload_dir, + executable=executable, studio_task_queue=studio_task_queue) projects_endpoint = ProjectsEndpoint(resource) server_manager.add_route( @@ -111,7 +115,7 @@ def run_webserver(*args, **kwargs): last_reprocessed = time.time() while True: if time.time() - last_reprocessed > 20: - reprocess_failed(kwargs["upload_dir"], webserver_url) + reprocess_failed(upload_dir, 
webserver_url) last_reprocessed = time.time() if studio_task_queue: args = studio_task_queue.popleft() diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 3d3e425a86..a64b7c2911 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -189,11 +189,11 @@ from .plugin_tools import ( filter_pyblish_plugins, set_plugin_attributes_from_settings, source_hash, - get_unique_layer_name, - get_background_layers, ) from .path_tools import ( + format_file_size, + collect_frames, create_hard_link, version_up, get_version_from_path, @@ -203,19 +203,6 @@ from .path_tools import ( get_project_basic_paths, ) -from .editorial import ( - is_overlapping_otio_ranges, - otio_range_to_frame_range, - otio_range_with_handles, - get_media_range_with_retimes, - convert_to_padded_path, - trim_media_range, - range_from_frames, - frames_to_secons, - frames_to_timecode, - make_sequence_collection -) - from .openpype_version import ( op_version_control_available, get_openpype_version, @@ -354,9 +341,9 @@ __all__ = [ "filter_pyblish_plugins", "set_plugin_attributes_from_settings", "source_hash", - "get_unique_layer_name", - "get_background_layers", + "format_file_size", + "collect_frames", "create_hard_link", "version_up", "get_version_from_path", @@ -383,16 +370,6 @@ __all__ = [ "validate_mongo_connection", "OpenPypeMongoConnection", - "is_overlapping_otio_ranges", - "otio_range_with_handles", - "convert_to_padded_path", - "otio_range_to_frame_range", - "get_media_range_with_retimes", - "trim_media_range", - "range_from_frames", - "frames_to_secons", - "frames_to_timecode", - "make_sequence_collection", "create_project_folders", "create_workdir_extra_folders", "get_project_basic_paths", diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py deleted file mode 100644 index e4ff87aa0f..0000000000 --- a/openpype/lib/abstract_collect_render.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -"""Content was moved to 'openpype.pipeline.publish.abstract_collect_render'. - -Please change your imports as soon as possible. - -File will be probably removed in OpenPype 3.14.* -""" - -import warnings -from openpype.pipeline.publish import AbstractCollectRender, RenderInstance - - -class CollectRenderDeprecated(DeprecationWarning): - pass - - -warnings.simplefilter("always", CollectRenderDeprecated) -warnings.warn( - ( - "Content of 'abstract_collect_render' was moved." - "\nUsing deprecated source of 'abstract_collect_render'. Content was" - " move to 'openpype.pipeline.publish.abstract_collect_render'." - " Please change your imports as soon as possible." - ), - category=CollectRenderDeprecated, - stacklevel=4 -) - - -__all__ = ( - "AbstractCollectRender", - "RenderInstance" -) diff --git a/openpype/lib/abstract_expected_files.py b/openpype/lib/abstract_expected_files.py deleted file mode 100644 index f24d844fe5..0000000000 --- a/openpype/lib/abstract_expected_files.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -"""Content was moved to 'openpype.pipeline.publish.abstract_expected_files'. - -Please change your imports as soon as possible. - -File will be probably removed in OpenPype 3.14.* -""" - -import warnings -from openpype.pipeline.publish import ExpectedFiles - - -class ExpectedFilesDeprecated(DeprecationWarning): - pass - - -warnings.simplefilter("always", ExpectedFilesDeprecated) -warnings.warn( - ( - "Content of 'abstract_expected_files' was moved." - "\nUsing deprecated source of 'abstract_expected_files'. 
Content was" - " move to 'openpype.pipeline.publish.abstract_expected_files'." - " Please change your imports as soon as possible." - ), - category=ExpectedFilesDeprecated, - stacklevel=4 -) - - -__all__ = ( - "ExpectedFiles", -) diff --git a/openpype/lib/abstract_metaplugins.py b/openpype/lib/abstract_metaplugins.py deleted file mode 100644 index 346b5d86b3..0000000000 --- a/openpype/lib/abstract_metaplugins.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Content was moved to 'openpype.pipeline.publish.publish_plugins'. - -Please change your imports as soon as possible. - -File will be probably removed in OpenPype 3.14.* -""" - -import warnings -from openpype.pipeline.publish import ( - AbstractMetaInstancePlugin, - AbstractMetaContextPlugin -) - - -class MetaPluginsDeprecated(DeprecationWarning): - pass - - -warnings.simplefilter("always", MetaPluginsDeprecated) -warnings.warn( - ( - "Content of 'abstract_metaplugins' was moved." - "\nUsing deprecated source of 'abstract_metaplugins'. Content was" - " moved to 'openpype.pipeline.publish.publish_plugins'." - " Please change your imports as soon as possible." - ), - category=MetaPluginsDeprecated, - stacklevel=4 -) - - -__all__ = ( - "AbstractMetaInstancePlugin", - "AbstractMetaContextPlugin", -) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 074e815160..990dc7495a 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -24,7 +24,7 @@ from openpype.settings.constants import ( METADATA_KEYS, M_DYNAMIC_KEY_LABEL ) -from . import PypeLogger +from .log import Logger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username @@ -138,7 +138,7 @@ def get_logger(): """Global lib.applications logger getter.""" global _logger if _logger is None: - _logger = PypeLogger.get_logger(__name__) + _logger = Logger.get_logger(__name__) return _logger @@ -373,7 +373,7 @@ class ApplicationManager: """ def __init__(self, system_settings=None): - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.app_groups = {} self.applications = {} @@ -469,6 +469,19 @@ class ApplicationManager: for tool in group: self.tools[tool.full_name] = tool + def find_latest_available_variant_for_group(self, group_name): + group = self.app_groups.get(group_name) + if group is None or not group.enabled: + return None + + output = None + for _, variant in reversed(sorted(group.variants.items())): + executable = variant.find_executable() + if executable: + output = variant + break + return output + def launch(self, app_name, **data): """Launch procedure. @@ -735,7 +748,7 @@ class LaunchHook: Always should be called """ - self.log = PypeLogger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.launch_context = launch_context @@ -877,7 +890,7 @@ class ApplicationLaunchContext: # Logger logger_name = "{}-{}".format(self.__class__.__name__, self.app_name) - self.log = PypeLogger.get_logger(logger_name) + self.log = Logger.get_logger(logger_name) self.executable = executable @@ -950,6 +963,63 @@ class ApplicationLaunchContext: ) self.kwargs["env"] = value + def _collect_addons_launch_hook_paths(self): + """Helper to collect application launch hooks from addons. + + Module have to have implemented 'get_launch_hook_paths' method which + can expect appliction as argument or nothing. + + Returns: + List[str]: Paths to launch hook directories. 
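+
+        Example:
+            A module can opt in with either signature handled below
+            (hypothetical addon; a single string result is wrapped
+            into a list by this helper):
+
+                class MyAddon(OpenPypeModule):
+                    def get_launch_hook_paths(self, application):
+                        return os.path.join(MY_ADDON_ROOT, "launch_hooks")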
+ """ + + expected_types = (list, tuple, set) + + output = [] + for module in self.modules_manager.get_enabled_modules(): + # Skip module if does not have implemented 'get_launch_hook_paths' + func = getattr(module, "get_launch_hook_paths", None) + if func is None: + continue + + func = module.get_launch_hook_paths + if hasattr(inspect, "signature"): + sig = inspect.signature(func) + expect_args = len(sig.parameters) > 0 + else: + expect_args = len(inspect.getargspec(func)[0]) > 0 + + # Pass application argument if method expect it. + try: + if expect_args: + hook_paths = func(self.application) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) + continue + + if not hook_paths: + continue + + # Convert string to list + if isinstance(hook_paths, six.string_types): + hook_paths = [hook_paths] + + # Skip invalid types + if not isinstance(hook_paths, expected_types): + self.log.warning(( + "Result of `get_launch_hook_paths`" + " has invalid type {}. Expected {}" + ).format(type(hook_paths), expected_types)) + continue + + output.extend(hook_paths) + return output + def paths_to_launch_hooks(self): """Directory paths where to look for launch hooks.""" # This method has potential to be part of application manager (maybe). @@ -983,9 +1053,7 @@ class ApplicationLaunchContext: paths.append(path) # Load modules paths - paths.extend( - self.modules_manager.collect_launch_hook_paths(self.application) - ) + paths.extend(self._collect_addons_launch_hook_paths()) return paths @@ -1335,6 +1403,7 @@ def get_app_environments_for_context( "env": env }) + data["env"].update(anatomy.root_environments()) prepare_app_environments(data, env_group, modules_manager) prepare_context_environments(data, env_group, modules_manager) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 17658eef93..bb0b07948f 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -3,11 +3,76 @@ import re import collections import uuid import json -from abc import ABCMeta, abstractmethod +from abc import ABCMeta, abstractmethod, abstractproperty import six import clique +# Global variable which store attribude definitions by type +# - default types are registered on import +_attr_defs_by_type = {} + + +def register_attr_def_class(cls): + """Register attribute definition. + + Currently are registered definitions used to deserialize data to objects. + + Attrs: + cls (AbtractAttrDef): Non-abstract class to be registered with unique + 'type' attribute. + + Raises: + KeyError: When type was already registered. + """ + + if cls.type in _attr_defs_by_type: + raise KeyError("Type \"{}\" was already registered".format(cls.type)) + _attr_defs_by_type[cls.type] = cls + + +def get_attributes_keys(attribute_definitions): + """Collect keys from list of attribute definitions. + + Args: + attribute_definitions (List[AbtractAttrDef]): Objects of attribute + definitions. + + Returns: + Set[str]: Keys that will be created using passed attribute definitions. + """ + + keys = set() + if not attribute_definitions: + return keys + + for attribute_def in attribute_definitions: + if not isinstance(attribute_def, UIDef): + keys.add(attribute_def.key) + return keys + + +def get_default_values(attribute_definitions): + """Receive default values for attribute definitions. + + Args: + attribute_definitions (List[AbtractAttrDef]): Attribute definitions for + which default values should be collected. 
+
+    Returns:
+        Dict[str, Any]: Default values for passed attribute definitions.
+    """
+
+    output = {}
+    if not attribute_definitions:
+        return output
+
+    for attr_def in attribute_definitions:
+        # Skip UI definitions
+        if not isinstance(attr_def, UIDef):
+            output[attr_def.key] = attr_def.default
+    return output
+

 class AbstractAttrDefMeta(ABCMeta):
     """Meta class to validate existence of 'key' attribute.

@@ -47,6 +112,8 @@ class AbtractAttrDef:
         next to value input or ahead.
     """

+    type_attributes = []
+
     is_value_def = True

     def __init__(
@@ -72,6 +139,16 @@ class AbtractAttrDef:
             return False
         return self.key == other.key

+    @abstractproperty
+    def type(self):
+        """Attribute definition type also used as identifier of class.
+
+        Returns:
+            str: Type of attribute definition.
+        """
+
+        pass
+
     @abstractmethod
     def convert_value(self, value):
         """Convert value to a valid one.
@@ -82,6 +159,35 @@ class AbtractAttrDef:

         pass

+    def serialize(self):
+        """Serialize object to data so it's possible to recreate it.
+
+        Returns:
+            Dict[str, Any]: Serialized object that can be passed to
+                'deserialize' method.
+        """
+
+        data = {
+            "type": self.type,
+            "key": self.key,
+            "label": self.label,
+            "tooltip": self.tooltip,
+            "default": self.default,
+            "is_label_horizontal": self.is_label_horizontal
+        }
+        for attr in self.type_attributes:
+            data[attr] = getattr(self, attr)
+        return data
+
+    @classmethod
+    def deserialize(cls, data):
+        """Recreate object from data.
+
+        Data can be received using 'serialize' method.
+        """
+
+        return cls(**data)
+

 # -----------------------------------------
 # UI attribute definitoins won't hold value
@@ -98,10 +204,12 @@ class UIDef(AbtractAttrDef):


 class UISeparatorDef(UIDef):
-    pass
+    type = "separator"


 class UILabelDef(UIDef):
+    type = "label"
+
     def __init__(self, label):
         super(UILabelDef, self).__init__(label=label)

@@ -117,6 +225,8 @@ class UnknownDef(AbtractAttrDef):
     have known definition of type.
     """

+    type = "unknown"
+
     def __init__(self, key, default=None, **kwargs):
         kwargs["default"] = default
         super(UnknownDef, self).__init__(key, **kwargs)

@@ -138,6 +248,13 @@ class NumberDef(AbtractAttrDef):
         default(int, float): Default value for conversion.
     """

+    type = "number"
+    type_attributes = [
+        "minimum",
+        "maximum",
+        "decimals"
+    ]
+
     def __init__(
         self, key, minimum=None, maximum=None, decimals=None, default=None,
         **kwargs
@@ -209,6 +326,12 @@ class TextDef(AbtractAttrDef):
         default(str, None): Default value. Empty string used when not defined.
     """

+    type = "text"
+    type_attributes = [
+        "multiline",
+        "placeholder",
+    ]
+
     def __init__(
         self, key, multiline=None, regex=None, placeholder=None, default=None,
         **kwargs
@@ -247,6 +370,11 @@ class TextDef(AbtractAttrDef):
             return value
         return self.default

+    def serialize(self):
+        data = super(TextDef, self).serialize()
+        data["regex"] = self.regex.pattern
+        return data
+

 class EnumDef(AbtractAttrDef):
     """Enumeration of single item from items.

@@ -258,6 +386,8 @@ class EnumDef(AbtractAttrDef):
         default: Default value. Must be one key(value) from passed items.
     """

+    type = "enum"
+
     def __init__(self, key, items, default=None, **kwargs):
         if not items:
             raise ValueError((
@@ -292,6 +422,11 @@ class EnumDef(AbtractAttrDef):
             return value
         return self.default

+    def serialize(self):
+        data = super(EnumDef, self).serialize()
+        data["items"] = list(self.items)
+        return data
+

 class BoolDef(AbtractAttrDef):
     """Boolean representation.

@@ -300,6 +435,8 @@
         default(bool): Default value. Set to `False` if not defined.
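+
+    Example:
+        A minimal usage sketch (the key and label are hypothetical)::
+
+            use_review = BoolDef("review", label="Review", default=True)
+            use_review.serialize()["default"]   # -> True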
""" + type = "bool" + def __init__(self, key, default=None, **kwargs): if default is None: default = False @@ -542,6 +679,15 @@ class FileDef(AbtractAttrDef): default(str, List[str]): Default value. """ + type = "path" + type_attributes = [ + "single_item", + "folders", + "extensions", + "allow_sequences", + "extensions_label", + ] + def __init__( self, key, single_item=True, folders=None, extensions=None, allow_sequences=True, extensions_label=None, default=None, **kwargs @@ -632,3 +778,71 @@ class FileDef(AbtractAttrDef): if self.single_item: return FileDefItem.create_empty_item().to_dict() return [] + + +def serialize_attr_def(attr_def): + """Serialize attribute definition to data. + + Args: + attr_def (AbtractAttrDef): Attribute definition to serialize. + + Returns: + Dict[str, Any]: Serialized data. + """ + + return attr_def.serialize() + + +def serialize_attr_defs(attr_defs): + """Serialize attribute definitions to data. + + Args: + attr_defs (List[AbtractAttrDef]): Attribute definitions to serialize. + + Returns: + List[Dict[str, Any]]: Serialized data. + """ + + return [ + serialize_attr_def(attr_def) + for attr_def in attr_defs + ] + + +def deserialize_attr_def(attr_def_data): + """Deserialize attribute definition from data. + + Args: + attr_def (Dict[str, Any]): Attribute definition data to deserialize. + """ + + attr_type = attr_def_data.pop("type") + cls = _attr_defs_by_type[attr_type] + return cls.deserialize(attr_def_data) + + +def deserialize_attr_defs(attr_defs_data): + """Deserialize attribute definitions. + + Args: + List[Dict[str, Any]]: List of attribute definitions. + """ + + return [ + deserialize_attr_def(attr_def_data) + for attr_def_data in attr_defs_data + ] + + +# Register attribute definitions +for _attr_class in ( + UISeparatorDef, + UILabelDef, + UnknownDef, + NumberDef, + TextDef, + EnumDef, + BoolDef, + FileDef +): + register_attr_def_class(_attr_class) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index eed17fce9d..12f4a5198b 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1,12 +1,13 @@ """Should be used only inside of hosts.""" import os -import re import copy import platform import logging import functools import warnings +import six + from openpype.client import ( get_project, get_assets, @@ -14,8 +15,14 @@ from openpype.client import ( get_last_version_by_subset_name, get_workfile_info, ) +from openpype.client.operations import ( + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, + PROJECT_NAME_ALLOWED_SYMBOLS, + PROJECT_NAME_REGEX, +) from .profiles_filtering import filter_profiles -from .events import emit_event from .path_templates import StringTemplate legacy_io = None @@ -23,15 +30,13 @@ legacy_io = None log = logging.getLogger("AvalonContext") +# Backwards compatibility - should not be used anymore +# - Will be removed in OP 3.16.* CURRENT_DOC_SCHEMAS = { - "project": "openpype:project-3.0", - "asset": "openpype:asset-3.0", - "config": "openpype:config-2.0" + "project": CURRENT_PROJECT_SCHEMA, + "asset": CURRENT_ASSET_DOC_SCHEMA, + "config": CURRENT_PROJECT_CONFIG_SCHEMA } -PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" -PROJECT_NAME_REGEX = re.compile( - "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) -) class AvalonContextDeprecatedWarning(DeprecationWarning): @@ -79,6 +84,7 @@ def deprecated(new_destination): return _decorator(func) +@deprecated("openpype.client.operations.create_project") def create_project( project_name, project_code, 
library_project=False, dbcon=None ): @@ -102,59 +108,14 @@ def create_project( Returns: dict: Created project document. + + Deprecated: + Function will be removed after release version 3.16.* """ - from openpype.settings import ProjectSettings, SaveWarningExc - from openpype.pipeline import AvalonMongoDB - from openpype.pipeline.schema import validate + from openpype.client.operations import create_project - if get_project(project_name, fields=["name"]): - raise ValueError("Project with name \"{}\" already exists".format( - project_name - )) - - if dbcon is None: - dbcon = AvalonMongoDB() - - if not PROJECT_NAME_REGEX.match(project_name): - raise ValueError(( - "Project name \"{}\" contain invalid characters" - ).format(project_name)) - - database = dbcon.database - project_doc = { - "type": "project", - "name": project_name, - "data": { - "code": project_code, - "library_project": library_project - }, - "schema": CURRENT_DOC_SCHEMAS["project"] - } - # Insert document with basic data - database[project_name].insert_one(project_doc) - # Load ProjectSettings for the project and save it to store all attributes - # and Anatomy - try: - project_settings_entity = ProjectSettings(project_name) - project_settings_entity.save() - except SaveWarningExc as exc: - print(str(exc)) - except Exception: - database[project_name].delete_one({"type": "project"}) - raise - - project_doc = get_project(project_name) - - try: - # Validate created project document - validate(project_doc) - except Exception: - # Remove project if is not valid - database[project_name].delete_one({"type": "project"}) - raise - - return project_doc + return create_project(project_name, project_code, library_project) def with_pipeline_io(func): @@ -178,7 +139,7 @@ def is_latest(representation): bool: Whether the representation is of latest version. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.context_tools import is_representation_from_latest @@ -191,7 +152,7 @@ def any_outdated(): """Return whether the current scene has any outdated content. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.load import any_outdated_containers @@ -212,7 +173,7 @@ def get_asset(asset_name=None): (MongoDB document) Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.context_tools import get_current_project_asset @@ -224,13 +185,14 @@ def get_asset(asset_name=None): def get_system_general_anatomy_data(system_settings=None): """ Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.template_data import get_general_template_data return get_general_template_data(system_settings) +@deprecated("openpype.client.get_linked_asset_ids") def get_linked_asset_ids(asset_doc): """Return linked asset ids for `asset_doc` from DB @@ -239,26 +201,20 @@ def get_linked_asset_ids(asset_doc): Returns: (list): MongoDB ids of input links. 
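+
+    Example:
+        Prefer the replacement client API, which is what this wrapper
+        calls internally::
+
+            from openpype.client import get_linked_asset_ids
+            link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc)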
+ + Deprecated: + Function will be removed after release version 3.16.* """ - output = [] - if not asset_doc: - return output - input_links = asset_doc["data"].get("inputLinks") or [] - if input_links: - for item in input_links: - # Backwards compatibility for "_id" key which was replaced with - # "id" - if "_id" in item: - link_id = item["_id"] - else: - link_id = item["id"] - output.append(link_id) + from openpype.client import get_linked_asset_ids + from openpype.pipeline import legacy_io - return output + project_name = legacy_io.active_project() + + return get_linked_asset_ids(project_name, asset_doc=asset_doc) -@with_pipeline_io +@deprecated("openpype.client.get_linked_assets") def get_linked_assets(asset_doc): """Return linked assets for `asset_doc` from DB @@ -267,14 +223,17 @@ def get_linked_assets(asset_doc): Returns: (list) Asset documents of input links for passed asset doc. + + Deprecated: + Function will be removed after release version 3.15.* """ - link_ids = get_linked_asset_ids(asset_doc) - if not link_ids: - return [] + from openpype.pipeline import legacy_io + from openpype.client import get_linked_assets project_name = legacy_io.active_project() - return list(get_assets(project_name, link_ids)) + + return get_linked_assets(project_name, asset_doc=asset_doc) @deprecated("openpype.client.get_last_version_by_subset_name") @@ -296,7 +255,7 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): dict: Last version document for entered. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ if not project_name: @@ -344,6 +303,9 @@ def get_workfile_template_key_from_context( Raises: ValueError: When both 'dbcon' and 'project_name' were not passed. + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import ( @@ -387,6 +349,9 @@ def get_workfile_template_key( Raises: ValueError: When both 'project_name' and 'project_settings' were not passed. + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import get_workfile_template_key @@ -411,7 +376,7 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): dict: Data prepared for filling workdir template. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.template_data import get_template_data @@ -447,6 +412,9 @@ def get_workdir_with_workdir_data( Raises: ValueError: When both `anatomy` and `project_name` are set to None. + + Deprecated: + Function will be removed after release version 3.15.* """ if not anatomy and not project_name: @@ -492,6 +460,9 @@ def get_workdir( Returns: TemplateResult: Workdir path. + + Deprecated: + Function will be removed after release version 3.15.* """ from openpype.pipeline.workfile import get_workdir @@ -518,7 +489,7 @@ def template_data_from_session(session=None): dict: All available data from session. 
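get_linked_asset_ids and get_linked_assets now delegate to openpype.client, which takes the project name explicitly, as the wrappers above show. A hedged usage sketch; the asset name is hypothetical:

from openpype.client import (
    get_asset_by_name,
    get_linked_asset_ids,
    get_linked_assets,
)
from openpype.pipeline import legacy_io

project_name = legacy_io.active_project()
asset_doc = get_asset_by_name(project_name, "sh010")  # hypothetical asset

link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc)
linked_docs = get_linked_assets(project_name, asset_doc=asset_doc)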
Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.context_tools import get_template_data_from_session @@ -526,7 +497,7 @@ def template_data_from_session(session=None): return get_template_data_from_session(session) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.compute_session_changes") def compute_session_changes( session, task=None, asset=None, app=None, template_key=None ): @@ -547,64 +518,49 @@ def compute_session_changes( Returns: dict: The required changes in the Session dictionary. + + Deprecated: + Function will be removed after release version 3.16.* """ - from openpype.pipeline.context_tools import get_workdir_from_session + from openpype.pipeline import legacy_io + from openpype.pipeline.context_tools import compute_session_changes - changes = dict() + if isinstance(asset, six.string_types): + project_name = legacy_io.active_project() + asset = get_asset_by_name(project_name, asset) - # If no changes, return directly - if not any([task, asset, app]): - return changes - - # Get asset document and asset - asset_document = None - asset_tasks = None - if isinstance(asset, dict): - # Assume asset database document - asset_document = asset - asset_tasks = asset_document.get("data", {}).get("tasks") - asset = asset["name"] - - if not asset_document or not asset_tasks: - # Assume asset name - project_name = session["AVALON_PROJECT"] - asset_document = get_asset_by_name( - project_name, asset, fields=["data.tasks"] - ) - assert asset_document, "Asset must exist" - - # Detect any changes compared session - mapping = { - "AVALON_ASSET": asset, - "AVALON_TASK": task, - "AVALON_APP": app, - } - changes = { - key: value - for key, value in mapping.items() - if value and value != session.get(key) - } - if not changes: - return changes - - # Compute work directory (with the temporary changed session so far) - _session = session.copy() - _session.update(changes) - - changes["AVALON_WORKDIR"] = get_workdir_from_session(_session) - - return changes + return compute_session_changes( + session, + asset, + task, + template_key + ) @deprecated("openpype.pipeline.context_tools.get_workdir_from_session") def get_workdir_from_session(session=None, template_key=None): + """Calculate workdir path based on session data. + + Args: + session (Union[None, Dict[str, str]]): Session to use. If not passed + current context session is used (from legacy_io). + template_key (Union[str, None]): Precalculate template key to define + workfile template name in Anatomy. + + Returns: + str: Workdir path. + + Deprecated: + Function will be removed after release version 3.16.* + """ + from openpype.pipeline.context_tools import get_workdir_from_session return get_workdir_from_session(session, template_key) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.change_current_context") def update_current_task(task=None, asset=None, app=None, template_key=None): """Update active Session to a new task work area. @@ -617,35 +573,19 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): Returns: dict: The changed key, values in the current Session. 
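The rewritten compute_session_changes wrapper resolves an asset name to its document before delegating, and the new function takes arguments in (session, asset, task, template_key) order. A minimal sketch of calling the new function directly; asset and task values are hypothetical:

from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.context_tools import compute_session_changes

project_name = legacy_io.active_project()
asset_doc = get_asset_by_name(project_name, "sh020")  # hypothetical asset

# Returns only the session keys that would change.
changes = compute_session_changes(legacy_io.Session, asset_doc, "comp")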
+ + Deprecated: + Function will be removed after release version 3.16.* """ - changes = compute_session_changes( - legacy_io.Session, - task=task, - asset=asset, - app=app, - template_key=template_key - ) + from openpype.pipeline import legacy_io + from openpype.pipeline.context_tools import change_current_context - # Update the Session and environments. Pop from environments all keys with - # value set to None. - for key, value in changes.items(): - legacy_io.Session[key] = value - if value is None: - os.environ.pop(key, None) - else: - os.environ[key] = value + project_name = legacy_io.active_project() + if isinstance(asset, six.string_types): + asset = get_asset_by_name(project_name, asset) - data = changes.copy() - # Convert env keys to human readable keys - data["project_name"] = legacy_io.Session["AVALON_PROJECT"] - data["asset_name"] = legacy_io.Session["AVALON_ASSET"] - data["task_name"] = legacy_io.Session["AVALON_TASK"] - - # Emit session change - emit_event("taskChanged", data) - - return changes + return change_current_context(asset, task, template_key) @deprecated("openpype.client.get_workfile_info") @@ -664,6 +604,9 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): Returns: dict: Workfile document or None. + + Deprecated: + Function will be removed after release version 3.15.* """ # Use legacy_io if dbcon is not entered @@ -774,12 +717,17 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): @deprecated("openpype.pipeline.workfile.BuildWorkfile") def BuildWorkfile(): + """Build workfile class was moved to workfile pipeline. + + Deprecated: + Function will be removed after release version 3.16.* + """ from openpype.pipeline.workfile import BuildWorkfile return BuildWorkfile() -@with_pipeline_io +@deprecated("openpype.pipeline.create.get_legacy_creator_by_name") def get_creator_by_name(creator_name, case_sensitive=False): """Find creator plugin by name. @@ -790,23 +738,13 @@ def get_creator_by_name(creator_name, case_sensitive=False): Returns: Creator: Return first matching plugin or `None`. + + Deprecated: + Function will be removed after release version 3.16.* """ - from openpype.pipeline import discover_legacy_creator_plugins + from openpype.pipeline.create import get_legacy_creator_by_name - # Lower input creator name if is not case sensitive - if not case_sensitive: - creator_name = creator_name.lower() - - for creator_plugin in discover_legacy_creator_plugins(): - _creator_name = creator_plugin.__name__ - - # Lower creator plugin name if is not case sensitive - if not case_sensitive: - _creator_name = _creator_name.lower() - - if _creator_name == creator_name: - return creator_plugin - return None + return get_legacy_creator_by_name(creator_name, case_sensitive) @deprecated @@ -816,10 +754,7 @@ def change_timer_to_current_context(): Deprecated: This method is specific for TimersManager module so please use the functionality from there. Function will be removed after release - version 3.14.* - - TODO: - - use TimersManager's static method instead of reimplementing it here + version 3.15.* """ from openpype.pipeline import legacy_io @@ -934,6 +869,9 @@ def get_custom_workfile_template_by_context( Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) 
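update_current_task is now only a compatibility shim around change_current_context, so host integrations can switch to the new call. A sketch under the same assumptions, with hypothetical asset and task names:

from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.context_tools import change_current_context

project_name = legacy_io.active_project()
asset_doc = get_asset_by_name(project_name, "sh030")  # hypothetical asset
changes = change_current_context(asset_doc, "animation")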
+ + Deprecated: + Function will be removed after release version 3.16.* """ if anatomy is None: @@ -992,6 +930,9 @@ def get_custom_workfile_template_by_string_context( Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + + Deprecated: + Function will be removed after release version 3.16.* """ project_name = None @@ -1026,6 +967,9 @@ def get_custom_workfile_template(template_profiles): Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline import legacy_io @@ -1054,6 +998,9 @@ def get_last_workfile_with_version( Returns: tuple: Last workfile with version if there is any otherwise returns (None, None). + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import get_last_workfile_with_version @@ -1080,6 +1027,9 @@ def get_last_workfile( Returns: str: Last or first workfile as filename of full path to filename. + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import get_last_workfile @@ -1089,9 +1039,10 @@ def get_last_workfile( ) -@with_pipeline_io -def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, - link_type=None, max_depth=0): +@deprecated("openpype.client.get_linked_representation_id") +def get_linked_ids_for_representations( + project_name, repre_ids, dbcon=None, link_type=None, max_depth=0 +): """Returns list of linked ids of particular type (if provided). Goes from representations to version, back to representations @@ -1102,104 +1053,25 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, with Session. link_type (str): ['reference', '..] max_depth (int): limit how many levels of recursion + Returns: (list) of ObjectId - linked representations + + Deprecated: + Function will be removed after release version 3.16.* """ - # Create new dbcon if not passed and use passed project name - if not dbcon: - from openpype.pipeline import AvalonMongoDB - dbcon = AvalonMongoDB() - dbcon.Session["AVALON_PROJECT"] = project_name - # Validate that passed dbcon has same project - elif dbcon.Session["AVALON_PROJECT"] != project_name: - raise ValueError("Passed connection does not have right project") + + from openpype.client import get_linked_representation_id if not isinstance(repre_ids, list): repre_ids = [repre_ids] - version_ids = dbcon.distinct("parent", { - "_id": {"$in": repre_ids}, - "type": "representation" - }) - - match = { - "_id": {"$in": version_ids}, - "type": "version" - } - - graph_lookup = { - "from": project_name, - "startWith": "$data.inputLinks.id", - "connectFromField": "data.inputLinks.id", - "connectToField": "_id", - "as": "outputs_recursive", - "depthField": "depth" - } - if max_depth != 0: - # We offset by -1 since 0 basically means no recursion - # but the recursion only happens after the initial lookup - # for outputs. 
- graph_lookup["maxDepth"] = max_depth - 1 - - pipeline_ = [ - # Match - {"$match": match}, - # Recursive graph lookup for inputs - {"$graphLookup": graph_lookup} - ] - - result = dbcon.aggregate(pipeline_) - referenced_version_ids = _process_referenced_pipeline_result(result, - link_type) - - ref_ids = dbcon.distinct( - "_id", - filter={ - "parent": {"$in": list(referenced_version_ids)}, - "type": "representation" - } - ) - - return list(ref_ids) - - -def _process_referenced_pipeline_result(result, link_type): - """Filters result from pipeline for particular link_type. - - Pipeline cannot use link_type directly in a query. - Returns: - (list) - """ - referenced_version_ids = set() - correctly_linked_ids = set() - for item in result: - input_links = item["data"].get("inputLinks", []) - correctly_linked_ids = _filter_input_links(input_links, - link_type, - correctly_linked_ids) - - # outputs_recursive in random order, sort by depth - outputs_recursive = sorted(item.get("outputs_recursive", []), - key=lambda d: d["depth"]) - - for output in outputs_recursive: - if output["_id"] not in correctly_linked_ids: # leaf - continue - - correctly_linked_ids = _filter_input_links( - output["data"].get("inputLinks", []), - link_type, - correctly_linked_ids) - - referenced_version_ids.add(output["_id"]) - - return referenced_version_ids - - -def _filter_input_links(input_links, link_type, correctly_linked_ids): - for input_link in input_links: - if not link_type or input_link["type"] == link_type: - correctly_linked_ids.add(input_link.get("id") or - input_link.get("_id")) # legacy - - return correctly_linked_ids + output = [] + for repre_id in repre_ids: + output.extend(get_linked_representation_id( + project_name, + repre_id=repre_id, + link_type=link_type, + max_depth=max_depth + )) + return output diff --git a/openpype/lib/config.py b/openpype/lib/config.py deleted file mode 100644 index 26822649e4..0000000000 --- a/openpype/lib/config.py +++ /dev/null @@ -1,41 +0,0 @@ -import warnings -import functools - - -class ConfigDeprecatedWarning(DeprecationWarning): - pass - - -def deprecated(func): - """Mark functions as deprecated. - - It will result in a warning being emitted when the function is used. - """ - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.simplefilter("always", ConfigDeprecatedWarning) - warnings.warn( - ( - "Deprecated import of function '{}'." - " Class was moved to 'openpype.lib.dateutils.{}'." - " Please change your imports." - ).format(func.__name__), - category=ConfigDeprecatedWarning - ) - return func(*args, **kwargs) - return new_func - - -@deprecated -def get_datetime_data(datetime_obj=None): - from .dateutils import get_datetime_data - - return get_datetime_data(datetime_obj) - - -@deprecated -def get_formatted_current_time(): - from .dateutils import get_formatted_current_time - - return get_formatted_current_time() diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index ffcfe9fa4d..efb542de75 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -1,81 +1,113 @@ """Functions useful for delivery action or loader""" import os import shutil -import glob -import clique -import collections - -from .path_templates import ( - StringTemplate, - TemplateUnsolved, -) +import functools +import warnings +class DeliveryDeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. 
+ """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeliveryDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeliveryDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + +@deprecated("openpype.lib.path_tools.collect_frames") def collect_frames(files): + """Returns dict of source path and its frame, if from sequence + + Uses clique as most precise solution, used when anatomy template that + created files is not known. + + Assumption is that frames are separated by '.', negative frames are not + allowed. + + Args: + files(list) or (set with single value): list of source paths + + Returns: + (dict): {'/asset/subset_v001.0001.png': '0001', ....} + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - Returns dict of source path and its frame, if from sequence - Uses clique as most precise solution, used when anatomy template that - created files is not known. + from .path_tools import collect_frames - Assumption is that frames are separated by '.', negative frames are not - allowed. + return collect_frames(files) - Args: - files(list) or (set with single value): list of source paths - Returns: - (dict): {'/asset/subset_v001.0001.png': '0001', ....} + +@deprecated("openpype.lib.path_tools.format_file_size") +def sizeof_fmt(num, suffix=None): + """Returns formatted string with size in appropriate unit + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - patterns = [clique.PATTERNS["frames"]] - collections, remainder = clique.assemble(files, minimum_items=1, - patterns=patterns) - sources_and_frames = {} - if collections: - for collection in collections: - src_head = collection.head - src_tail = collection.tail - - for index in collection.indexes: - src_frame = collection.format("{padding}") % index - src_file_name = "{}{}{}".format(src_head, src_frame, - src_tail) - sources_and_frames[src_file_name] = src_frame - else: - sources_and_frames[remainder.pop()] = None - - return sources_and_frames - - -def sizeof_fmt(num, suffix='B'): - """Returns formatted string with size in appropriate unit""" - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if abs(num) < 1024.0: - return "%3.1f%s%s" % (num, unit, suffix) - num /= 1024.0 - return "%.1f%s%s" % (num, 'Yi', suffix) + from .path_tools import format_file_size + return format_file_size(num, suffix) +@deprecated("openpype.pipeline.load.get_representation_path_with_anatomy") def path_from_representation(representation, anatomy): - try: - template = representation["data"]["template"] + """Get representation path using representation document and anatomy. - except KeyError: - return None + Args: + representation (Dict[str, Any]): Representation document. + anatomy (Anatomy): Project anatomy. 
- try: - context = representation["context"] - context["root"] = anatomy.roots - path = StringTemplate.format_strict_template(template, context) - return os.path.normpath(path) + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. + """ - except TemplateUnsolved: - # Template references unavailable data - return None + from openpype.pipeline.load import get_representation_path_with_anatomy - return path + return get_representation_path_with_anatomy(representation, anatomy) +@deprecated def copy_file(src_path, dst_path): """Hardlink file if possible(to save space), copy if not""" from openpype.lib import create_hard_link # safer importing @@ -91,131 +123,96 @@ def copy_file(src_path, dst_path): shutil.copyfile(src_path, dst_path) +@deprecated("openpype.pipeline.delivery.get_format_dict") def get_format_dict(anatomy, location_path): """Returns replaced root values from user provided value. - Args: - anatomy (Anatomy) - location_path (str): user provided value - Returns: - (dict): prepared for formatting of a template + Args: + anatomy (Anatomy) + location_path (str): user provided value + + Returns: + (dict): prepared for formatting of a template + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - format_dict = {} - if location_path: - location_path = location_path.replace("\\", "/") - root_names = anatomy.root_names_from_templates( - anatomy.templates["delivery"] - ) - if root_names is None: - format_dict["root"] = location_path - else: - format_dict["root"] = {} - for name in root_names: - format_dict["root"][name] = location_path - return format_dict + + from openpype.pipeline.delivery import get_format_dict + + return get_format_dict(anatomy, location_path) +@deprecated("openpype.pipeline.delivery.check_destination_path") def check_destination_path(repre_id, anatomy, anatomy_data, datetime_data, template_name): """ Try to create destination path based on 'template_name'. - In the case that path cannot be filled, template contains unmatched - keys, provide error message to filter out repre later. + In the case that the path cannot be filled (template contains unmatched + keys), provide an error message to filter out the representation later. - Args: - anatomy (Anatomy) - anatomy_data (dict): context to fill anatomy - datetime_data (dict): values with actual date - template_name (str): to pick correct delivery template - Returns: - (collections.defauldict): {"TYPE_OF_ERROR":"ERROR_DETAIL"} + Args: + anatomy (Anatomy) + anatomy_data (dict): context to fill anatomy + datetime_data (dict): values with actual date + template_name (str): to pick correct delivery template + + Returns: + (collections.defaultdict): {"TYPE_OF_ERROR":"ERROR_DETAIL"} + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - anatomy_data.update(datetime_data) - anatomy_filled = anatomy.format_all(anatomy_data) - dest_path = anatomy_filled["delivery"][template_name] - report_items = collections.defaultdict(list) - if not dest_path.solved: - msg = ( - "Missing keys in Representation's context" - " for anatomy template \"{}\"." - ).format(template_name) + from openpype.pipeline.delivery import check_destination_path - sub_msg = ( - "Representation: {}<br>" - ).format(repre_id) - - if dest_path.missing_keys: - keys = ", ".join(dest_path.missing_keys) - sub_msg += ( - "- Missing keys: \"{}\"<br>" - ).format(keys) - - if dest_path.invalid_types: - items = [] - for key, value in dest_path.invalid_types.items(): - items.append("\"{}\" {}".format(key, str(value))) - - keys = ", ".join(items) - sub_msg += ( - "- Invalid value DataType: \"{}\"<br>" - ).format(keys) - - report_items[msg].append(sub_msg) - - return report_items + return check_destination_path( + repre_id, + anatomy, + anatomy_data, + datetime_data, + template_name + ) +@deprecated("openpype.pipeline.delivery.deliver_single_file") def process_single_file( src_path, repre, anatomy, template_name, anatomy_data, format_dict, report_items, log ): """Copy single file to calculated path based on template - Args: - src_path(str): path of source representation file - _repre (dict): full repre, used only in process_sequence, here only - as to share same signature - anatomy (Anatomy) - template_name (string): user selected delivery template name - anatomy_data (dict): data from repre to fill anatomy with - format_dict (dict): root dictionary with names and values - report_items (collections.defaultdict): to return error messages - log (Logger): for log printing - Returns: - (collections.defaultdict , int) + Args: + src_path (str): path of source representation file + _repre (dict): full repre, used only in process_sequence, here only + as to share same signature + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (Logger): for log printing + + Returns: + (collections.defaultdict, int) + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - # Make sure path is valid for all platforms - src_path = os.path.normpath(src_path.replace("\\", "/")) - if not os.path.exists(src_path): - msg = "{} doesn't exist for {}".format(src_path, repre["_id"]) - report_items["Source file was not found"].append(msg) - return report_items, 0 + from openpype.pipeline.delivery import deliver_single_file - anatomy_filled = anatomy.format(anatomy_data) - if format_dict: - template_result = anatomy_filled["delivery"][template_name] - delivery_path = template_result.rootless.format(**format_dict) - else: - delivery_path = anatomy_filled["delivery"][template_name] - - # Backwards compatibility when extension contained `.` - delivery_path = delivery_path.replace("..", ".") - # Make sure path is valid for all platforms - delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) - - delivery_folder = os.path.dirname(delivery_path) - if not os.path.exists(delivery_folder): - os.makedirs(delivery_folder) - - log.debug("Copying single: {} -> {}".format(src_path, delivery_path)) - copy_file(src_path, delivery_path) - - return report_items, 1 + return deliver_single_file( + src_path, repre, anatomy, template_name, anatomy_data, format_dict, + report_items, log + ) +@deprecated("openpype.pipeline.delivery.deliver_sequence") def process_sequence( src_path, repre, anatomy, template_name, anatomy_data, format_dict, report_items, log @@ -223,128 +220,33 @@ def process_sequence( """ For Pype2 (mainly; works in 3 too) where representation might not contain files. - Uses listing physical files (not 'files' on repre as a)might not be - present, b)might not be reliable for representation and copying them. + Lists physical files (instead of 'files' on the repre, as those a) might + not be present, b) might not be reliable for the representation) and copies them. - TODO Should be refactored when files are sufficient to drive all - representations. + TODO Should be refactored when files are sufficient to drive all + representations.
- Args: - src_path(str): path of source representation file - repre (dict): full representation - anatomy (Anatomy) - template_name (string): user selected delivery template name - anatomy_data (dict): data from repre to fill anatomy with - format_dict (dict): root dictionary with names and values - report_items (collections.defaultdict): to return error messages - log (Logger): for log printing - Returns: - (collections.defaultdict , int) + Args: + src_path(str): path of source representation file + repre (dict): full representation + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (Logger): for log printing + + Returns: + (collections.defaultdict , int) + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - src_path = os.path.normpath(src_path.replace("\\", "/")) - def hash_path_exist(myPath): - res = myPath.replace('#', '*') - glob_search_results = glob.glob(res) - if len(glob_search_results) > 0: - return True - return False + from openpype.pipeline.delivery import deliver_sequence - if not hash_path_exist(src_path): - msg = "{} doesn't exist for {}".format(src_path, - repre["_id"]) - report_items["Source file was not found"].append(msg) - return report_items, 0 - - delivery_templates = anatomy.templates.get("delivery") or {} - delivery_template = delivery_templates.get(template_name) - if delivery_template is None: - msg = ( - "Delivery template \"{}\" in anatomy of project \"{}\"" - " was not found" - ).format(template_name, anatomy.project_name) - report_items[""].append(msg) - return report_items, 0 - - # Check if 'frame' key is available in template which is required - # for sequence delivery - if "{frame" not in delivery_template: - msg = ( - "Delivery template \"{}\" in anatomy of project \"{}\"" - "does not contain '{{frame}}' key to fill. Delivery of sequence" - " can't be processed." - ).format(template_name, anatomy.project_name) - report_items[""].append(msg) - return report_items, 0 - - dir_path, file_name = os.path.split(str(src_path)) - - context = repre["context"] - ext = context.get("ext", context.get("representation")) - - if not ext: - msg = "Source extension not found, cannot find collection" - report_items[msg].append(src_path) - log.warning("{} <{}>".format(msg, context)) - return report_items, 0 - - ext = "." 
+ ext - # context.representation could be .psd - ext = ext.replace("..", ".") - - src_collections, remainder = clique.assemble(os.listdir(dir_path)) - src_collection = None - for col in src_collections: - if col.tail != ext: - continue - - src_collection = col - break - - if src_collection is None: - msg = "Source collection of files was not found" - report_items[msg].append(src_path) - log.warning("{} <{}>".format(msg, src_path)) - return report_items, 0 - - frame_indicator = "@####@" - - anatomy_data["frame"] = frame_indicator - anatomy_filled = anatomy.format(anatomy_data) - - if format_dict: - template_result = anatomy_filled["delivery"][template_name] - delivery_path = template_result.rootless.format(**format_dict) - else: - delivery_path = anatomy_filled["delivery"][template_name] - - delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) - delivery_folder = os.path.dirname(delivery_path) - dst_head, dst_tail = delivery_path.split(frame_indicator) - dst_padding = src_collection.padding - dst_collection = clique.Collection( - head=dst_head, - tail=dst_tail, - padding=dst_padding + return deliver_sequence( + src_path, repre, anatomy, template_name, anatomy_data, format_dict, + report_items, log ) - - if not os.path.exists(delivery_folder): - os.makedirs(delivery_folder) - - src_head = src_collection.head - src_tail = src_collection.tail - uploaded = 0 - for index in src_collection.indexes: - src_padding = src_collection.format("{padding}") % index - src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) - src = os.path.normpath( - os.path.join(dir_path, src_file_name) - ) - - dst_padding = dst_collection.format("{padding}") % index - dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) - log.debug("Copying single: {} -> {}".format(src, dst)) - copy_file(src, dst) - uploaded += 1 - - return report_items, uploaded diff --git a/openpype/lib/editorial.py b/openpype/lib/editorial.py deleted file mode 100644 index 49220b4f15..0000000000 --- a/openpype/lib/editorial.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Code related to editorial utility functions was moved -to 'openpype.pipeline.editorial' please change your imports as soon as -possible. File will be probably removed in OpenPype 3.14.* -""" - -import warnings -import functools - - -class EditorialDeprecatedWarning(DeprecationWarning): - pass - - -def editorial_deprecated(func): - """Mark functions as deprecated. - - It will result in a warning being emitted when the function is used. - """ - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.simplefilter("always", EditorialDeprecatedWarning) - warnings.warn( - ( - "Call to deprecated function '{}'." - " Function was moved to 'openpype.pipeline.editorial'." 
- ).format(func.__name__), - category=EditorialDeprecatedWarning, - stacklevel=2 - ) - return func(*args, **kwargs) - return new_func - - -@editorial_deprecated -def otio_range_to_frame_range(*args, **kwargs): - from openpype.pipeline.editorial import otio_range_to_frame_range - - return otio_range_to_frame_range(*args, **kwargs) - - -@editorial_deprecated -def otio_range_with_handles(*args, **kwargs): - from openpype.pipeline.editorial import otio_range_with_handles - - return otio_range_with_handles(*args, **kwargs) - - -@editorial_deprecated -def is_overlapping_otio_ranges(*args, **kwargs): - from openpype.pipeline.editorial import is_overlapping_otio_ranges - - return is_overlapping_otio_ranges(*args, **kwargs) - - -@editorial_deprecated -def convert_to_padded_path(*args, **kwargs): - from openpype.pipeline.editorial import convert_to_padded_path - - return convert_to_padded_path(*args, **kwargs) - - -@editorial_deprecated -def trim_media_range(*args, **kwargs): - from openpype.pipeline.editorial import trim_media_range - - return trim_media_range(*args, **kwargs) - - -@editorial_deprecated -def range_from_frames(*args, **kwargs): - from openpype.pipeline.editorial import range_from_frames - - return range_from_frames(*args, **kwargs) - - -@editorial_deprecated -def frames_to_secons(*args, **kwargs): - from openpype.pipeline.editorial import frames_to_seconds - - return frames_to_seconds(*args, **kwargs) - - -@editorial_deprecated -def frames_to_timecode(*args, **kwargs): - from openpype.pipeline.editorial import frames_to_timecode - - return frames_to_timecode(*args, **kwargs) - - -@editorial_deprecated -def make_sequence_collection(*args, **kwargs): - from openpype.pipeline.editorial import make_sequence_collection - - return make_sequence_collection(*args, **kwargs) - - -@editorial_deprecated -def get_media_range_with_retimes(*args, **kwargs): - from openpype.pipeline.editorial import get_media_range_with_retimes - - return get_media_range_with_retimes(*args, **kwargs) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 301d62e2a6..747761fb3e 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -1,6 +1,7 @@ """Events holding data about specific event.""" import os import re +import copy import inspect import logging import weakref @@ -207,6 +208,12 @@ class Event(object): @property def source(self): + """Event's source used for triggering callbacks. + + Returns: + Union[str, None]: Source string or None. Source is optional. + """ + return self._source @property @@ -215,6 +222,12 @@ class Event(object): @property def topic(self): + """Event's topic used for triggering callbacks. + + Returns: + str: Topic string. + """ + return self._topic def emit(self): @@ -227,6 +240,42 @@ class Event(object): ) self._event_system.emit_event(self) + def to_data(self): + """Convert Event object to data. + + Returns: + Dict[str, Any]: Event data. + """ + + return { + "id": self.id, + "topic": self.topic, + "source": self.source, + "data": copy.deepcopy(self.data) + } + + @classmethod + def from_data(cls, event_data, event_system=None): + """Create event from data. + + Args: + event_data (Dict[str, Any]): Event data with defined keys. Can be + created using 'to_data' method. + event_system (EventSystem): System to which the event belongs. + + Returns: + Event: Event with attributes from passed data. 
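to_data and from_data make events serializable, e.g. for logging or passing between processes. A minimal round-trip sketch; the topic and payload are hypothetical, and the constructor argument order follows the from_data body just below:

from openpype.lib.events import Event

event = Event("example.topic", {"path": "/tmp/scene.ma"}, "example-source")

data = event.to_data()
# data -> {"id": ..., "topic": "example.topic",
#          "source": "example-source", "data": {"path": ...}}

clone = Event.from_data(data)
assert clone.id == event.id and clone.topic == event.topic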
+ """ + + obj = cls( + event_data["topic"], + event_data["data"], + event_data["source"], + event_system + ) + obj._id = event_data["id"] + return obj + class EventSystem(object): """Encapsulate event handling into an object. diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py index c3e35772f3..f1f2a4fa0a 100644 --- a/openpype/lib/execute.py +++ b/openpype/lib/execute.py @@ -5,7 +5,7 @@ import platform import json import tempfile -from .log import PypeLogger as Logger +from .log import Logger from .vendor_bin_utils import find_executable # MSDN process creation flag (Windows only) @@ -40,7 +40,7 @@ def execute(args, log_levels = ['DEBUG:', 'INFO:', 'ERROR:', 'WARNING:', 'CRITICAL:'] - log = Logger().get_logger('execute') + log = Logger.get_logger('execute') log.info("Executing ({})".format(" ".join(args))) popen = subprocess.Popen( args, diff --git a/openpype/lib/log.py b/openpype/lib/log.py index e77edea0e9..26dcd86eec 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -486,12 +486,18 @@ class Logger: class PypeLogger(Logger): + """Duplicate of 'Logger'. + + Deprecated: + Class will be removed after release version 3.16.* + """ + @classmethod def get_logger(cls, *args, **kwargs): logger = Logger.get_logger(*args, **kwargs) # TODO uncomment when replaced most of places - # logger.warning(( - # "'openpype.lib.PypeLogger' is deprecated class." - # " Please use 'openpype.lib.Logger' instead." - # )) + logger.warning(( + "'openpype.lib.PypeLogger' is deprecated class." + " Please use 'openpype.lib.Logger' instead." + )) return logger diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index e4b18ec258..b160054e38 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -6,11 +6,6 @@ import collections import six -from .log import PypeLogger - -log = PypeLogger.get_logger(__name__) - - KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})") KEY_PADDING_PATTERN = re.compile(r"([^:]+)\S+[><]\S+") SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)") diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 4f28be3302..0b6d0a3391 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -1,19 +1,81 @@ import os import re -import abc -import json import logging -import six import platform +import functools +import warnings -from openpype.client import get_project -from openpype.settings import get_project_settings - -from .profiles_filtering import filter_profiles +import clique log = logging.getLogger(__name__) +class PathToolsDeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", PathToolsDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=PathToolsDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + +def format_file_size(file_size, suffix=None): + """Returns formatted string with size in appropriate unit. + + Args: + file_size (int): Size of file in bytes. + suffix (str): Suffix for formatted size. Default is 'B' (as bytes). + + Returns: + str: Formatted size using proper unit and passed suffix (e.g. 7 MiB). + """ + + if suffix is None: + suffix = "B" + + for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]: + if abs(file_size) < 1024.0: + return "%3.1f%s%s" % (file_size, unit, suffix) + file_size /= 1024.0 + return "%.1f%s%s" % (file_size, "Yi", suffix) + + def create_hard_link(src_path, dst_path): """Create hardlink of file. @@ -50,6 +112,43 @@ def create_hard_link(src_path, dst_path): ) +def collect_frames(files): + """Returns dict of source path and its frame, if from sequence + + Uses clique as most precise solution, used when anatomy template that + created files is not known. + + Assumption is that frames are separated by '.', negative frames are not + allowed. + + Args: + files(list) or (set with single value): list of source paths + + Returns: + (dict): {'/asset/subset_v001.0001.png': '0001', ....} + """ + + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble( + files, minimum_items=1, patterns=patterns) + + sources_and_frames = {} + if collections: + for collection in collections: + src_head = collection.head + src_tail = collection.tail + + for index in collection.indexes: + src_frame = collection.format("{padding}") % index + src_file_name = "{}{}{}".format( + src_head, src_frame, src_tail) + sources_and_frames[src_file_name] = src_frame + else: + sources_and_frames[remainder.pop()] = None + + return sources_and_frames + + def _rreplace(s, a, b, n=1): """Replace a with b in string s from right side n times.""" return b.join(s.rsplit(a, n)) @@ -119,12 +218,12 @@ def get_version_from_path(file): """Find version number in file path string. Args: - file (string): file path + file (str): file path Returns: - v: version number in string ('001') - + str: version number in string ('001') """ + pattern = re.compile(r"[\._]v([0-9]+)", re.IGNORECASE) try: return pattern.findall(file)[-1] @@ -140,16 +239,17 @@ def get_last_version_from_path(path_dir, filter): """Find last version of given directory content. 
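collect_frames, moved here from openpype.lib.delivery, uses clique's frame pattern to map each file to its frame token (or None for non-sequences). A small sketch of the expected behavior; file names are hypothetical:

from openpype.lib.path_tools import collect_frames

files = [
    "/asset/subset_v001.0001.png",
    "/asset/subset_v001.0002.png",
]
print(collect_frames(files))
# {'/asset/subset_v001.0001.png': '0001',
#  '/asset/subset_v001.0002.png': '0002'}

print(collect_frames({"/asset/review.mp4"}))
# {'/asset/review.mp4': None}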
Args: - path_dir (string): directory path + path_dir (str): directory path filter (list): list of strings used as file name filter Returns: - string: file name with last version + str: file name with last version Example: last_version_file = get_last_version_from_path( "/project/shots/shot01/work", ["shot01", "compositing", "nk"]) """ + assert os.path.isdir(path_dir), "`path_dir` argument needs to be directory" assert isinstance(filter, list) and ( len(filter) != 0), "`filter` argument needs to be list and not empty" @@ -171,107 +271,69 @@ def get_last_version_from_path(path_dir, filter): return None +@deprecated("openpype.pipeline.project_folders.concatenate_splitted_paths") def concatenate_splitted_paths(split_paths, anatomy): - pattern_array = re.compile(r"\[.*\]") - output = [] - for path_items in split_paths: - clean_items = [] - if isinstance(path_items, str): - path_items = [path_items] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - for path_item in path_items: - if not re.match(r"{.+}", path_item): - path_item = re.sub(pattern_array, "", path_item) - clean_items.append(path_item) + from openpype.pipeline.project_folders import concatenate_splitted_paths - # backward compatibility - if "__project_root__" in path_items: - for root, root_path in anatomy.roots.items(): - if not os.path.exists(str(root_path)): - log.debug("Root {} path path {} not exist on \ - computer!".format(root, root_path)) - continue - clean_items = ["{{root[{}]}}".format(root), - r"{project[name]}"] + clean_items[1:] - output.append(os.path.normpath(os.path.sep.join(clean_items))) - continue - - output.append(os.path.normpath(os.path.sep.join(clean_items))) - - return output + return concatenate_splitted_paths(split_paths, anatomy) +@deprecated def get_format_data(anatomy): - project_doc = get_project(anatomy.project_name, fields=["data.code"]) - project_code = project_doc["data"]["code"] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - return { - "root": anatomy.roots, - "project": { - "name": anatomy.project_name, - "code": project_code - }, - } + from openpype.pipeline.template_data import get_project_template_data + + data = get_project_template_data(project_name=anatomy.project_name) + data["root"] = anatomy.roots + return data +@deprecated("openpype.pipeline.project_folders.fill_paths") def fill_paths(path_list, anatomy): - format_data = get_format_data(anatomy) - filled_paths = [] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - for path in path_list: - new_path = path.format(**format_data) - filled_paths.append(new_path) + from openpype.pipeline.project_folders import fill_paths - return filled_paths + return fill_paths(path_list, anatomy) +@deprecated("openpype.pipeline.project_folders.create_project_folders") def create_project_folders(basic_paths, project_name): - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_name) + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - concat_paths = concatenate_splitted_paths(basic_paths, anatomy) - filled_paths = fill_paths(concat_paths, anatomy) + from openpype.pipeline.project_folders import create_project_folders - # Create folders - for path in filled_paths: - if os.path.exists(path): - log.debug("Folder already exists: {}".format(path)) - else: - log.debug("Creating folder: {}".format(path)) - os.makedirs(path) - - -def _list_path_items(folder_structure): - output = [] - for key, value in 
folder_structure.items(): - if not value: - output.append(key) - else: - paths = _list_path_items(value) - for path in paths: - if not isinstance(path, (list, tuple)): - path = [path] - - item = [key] - item.extend(path) - output.append(item) - - return output + return create_project_folders(project_name, basic_paths) +@deprecated("openpype.pipeline.project_folders.get_project_basic_paths") def get_project_basic_paths(project_name): - project_settings = get_project_settings(project_name) - folder_structure = ( - project_settings["global"]["project_folder_structure"] - ) - if not folder_structure: - return [] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - if isinstance(folder_structure, str): - folder_structure = json.loads(folder_structure) - return _list_path_items(folder_structure) + from openpype.pipeline.project_folders import get_project_basic_paths + + return get_project_basic_paths(project_name) +@deprecated("openpype.pipeline.workfile.create_workdir_extra_folders") def create_workdir_extra_folders( workdir, host_name, task_type, task_name, project_name, project_settings=None @@ -288,193 +350,18 @@ def create_workdir_extra_folders( project_name (str): Name of project on which task is. project_settings (dict): Prepared project settings. Are loaded if not passed. - """ - # Load project settings if not set - if not project_settings: - project_settings = get_project_settings(project_name) - # Load extra folders profiles - extra_folders_profiles = ( - project_settings["global"]["tools"]["Workfiles"]["extra_folders"] + Deprecated: + Function will be removed after release version 3.16.* + """ + + from openpype.pipeline.project_folders import create_workdir_extra_folders + + return create_workdir_extra_folders( + workdir, + host_name, + task_type, + task_name, + project_name, + project_settings ) - # Skip if are empty - if not extra_folders_profiles: - return - - # Prepare profiles filters - filter_data = { - "task_types": task_type, - "task_names": task_name, - "hosts": host_name - } - profile = filter_profiles(extra_folders_profiles, filter_data) - if profile is None: - return - - for subfolder in profile["folders"]: - # Make sure backslashes are converted to forwards slashes - # and does not start with slash - subfolder = subfolder.replace("\\", "/").lstrip("/") - # Skip empty strings - if not subfolder: - continue - - fullpath = os.path.join(workdir, subfolder) - if not os.path.exists(fullpath): - os.makedirs(fullpath) - - -@six.add_metaclass(abc.ABCMeta) -class HostDirmap: - """ - Abstract class for running dirmap on a workfile in a host. - - Dirmap is used to translate paths inside of host workfile from one - OS to another. (Eg. arstist created workfile on Win, different artists - opens same file on Linux.) - - Expects methods to be implemented inside of host: - on_dirmap_enabled: run host code for enabling dirmap - do_dirmap: run host code to do actual remapping - """ - - def __init__(self, host_name, project_settings, sync_module=None): - self.host_name = host_name - self.project_settings = project_settings - self.sync_module = sync_module # to limit reinit of Modules - - self._mapping = None # cache mapping - - @abc.abstractmethod - def on_enable_dirmap(self): - """ - Run host dependent operation for enabling dirmap if necessary. 
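Note the swapped argument order: the deprecated openpype.lib wrapper took (basic_paths, project_name), while the new pipeline function takes (project_name, basic_paths), as the wrapper above shows. A hedged sketch against the new module; the project name is hypothetical:

from openpype.pipeline.project_folders import (
    create_project_folders,
    get_project_basic_paths,
)

project_name = "demo_project"  # hypothetical project
basic_paths = get_project_basic_paths(project_name)
create_project_folders(project_name, basic_paths)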
- """ - - @abc.abstractmethod - def dirmap_routine(self, source_path, destination_path): - """ - Run host dependent remapping from source_path to destination_path - """ - - def process_dirmap(self): - # type: (dict) -> None - """Go through all paths in Settings and set them using `dirmap`. - - If artists has Site Sync enabled, take dirmap mapping directly from - Local Settings when artist is syncing workfile locally. - - Args: - project_settings (dict): Settings for current project. - - """ - if not self._mapping: - self._mapping = self.get_mappings(self.project_settings) - if not self._mapping: - return - - log.info("Processing directory mapping ...") - self.on_enable_dirmap() - log.info("mapping:: {}".format(self._mapping)) - - for k, sp in enumerate(self._mapping["source-path"]): - try: - print("{} -> {}".format(sp, - self._mapping["destination-path"][k])) - self.dirmap_routine(sp, - self._mapping["destination-path"][k]) - except IndexError: - # missing corresponding destination path - log.error(("invalid dirmap mapping, missing corresponding" - " destination directory.")) - break - except RuntimeError: - log.error("invalid path {} -> {}, mapping not registered".format( # noqa: E501 - sp, self._mapping["destination-path"][k] - )) - continue - - def get_mappings(self, project_settings): - """Get translation from source-path to destination-path. - - It checks if Site Sync is enabled and user chose to use local - site, in that case configuration in Local Settings takes precedence - """ - local_mapping = self._get_local_sync_dirmap(project_settings) - dirmap_label = "{}-dirmap".format(self.host_name) - if not self.project_settings[self.host_name].get(dirmap_label) and \ - not local_mapping: - return [] - mapping = local_mapping or \ - self.project_settings[self.host_name][dirmap_label]["paths"] or {} - enbled = self.project_settings[self.host_name][dirmap_label]["enabled"] - mapping_enabled = enbled or bool(local_mapping) - - if not mapping or not mapping_enabled or \ - not mapping.get("destination-path") or \ - not mapping.get("source-path"): - return [] - return mapping - - def _get_local_sync_dirmap(self, project_settings): - """ - Returns dirmap if synch to local project is enabled. - - Only valid mapping is from roots of remote site to local site set - in Local Settings. 
- - Args: - project_settings (dict) - Returns: - dict : { "source-path": [XXX], "destination-path": [YYYY]} - """ - import json - mapping = {} - - if not project_settings["global"]["sync_server"]["enabled"]: - return mapping - - from openpype.settings.lib import get_site_local_overrides - - if not self.sync_module: - from openpype.modules import ModulesManager - manager = ModulesManager() - self.sync_module = manager.modules_by_name["sync_server"] - - project_name = os.getenv("AVALON_PROJECT") - - active_site = self.sync_module.get_local_normalized_site( - self.sync_module.get_active_site(project_name)) - remote_site = self.sync_module.get_local_normalized_site( - self.sync_module.get_remote_site(project_name)) - log.debug("active {} - remote {}".format(active_site, remote_site)) - - if active_site == "local" \ - and project_name in self.sync_module.get_enabled_projects()\ - and active_site != remote_site: - - sync_settings = self.sync_module.get_sync_project_setting( - os.getenv("AVALON_PROJECT"), exclude_locals=False, - cached=False) - - active_overrides = get_site_local_overrides( - os.getenv("AVALON_PROJECT"), active_site) - remote_overrides = get_site_local_overrides( - os.getenv("AVALON_PROJECT"), remote_site) - - log.debug("local overrides".format(active_overrides)) - log.debug("remote overrides".format(remote_overrides)) - for root_name, active_site_dir in active_overrides.items(): - remote_site_dir = remote_overrides.get(root_name) or\ - sync_settings["sites"][remote_site]["root"][root_name] - if os.path.isdir(active_site_dir): - if not mapping.get("destination-path"): - mapping["destination-path"] = [] - mapping["destination-path"].append(active_site_dir) - - if not mapping.get("source-path"): - mapping["source-path"] = [] - mapping["source-path"].append(remote_site_dir) - - log.debug("local sync mapping:: {}".format(mapping)) - return mapping diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 060db94ae0..1e157dfbfd 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -3,7 +3,6 @@ import os import logging import re -import json import warnings import functools @@ -11,13 +10,8 @@ import functools from openpype.client import get_asset_by_id from openpype.settings import get_project_settings -from .profiles_filtering import filter_profiles - log = logging.getLogger(__name__) -# Subset name template used when plugin does not have defined any -DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" - class PluginToolsDeprecatedWarning(DeprecationWarning): pass @@ -64,13 +58,14 @@ def deprecated(new_destination): return _decorator(func) -class TaskNotSetError(KeyError): - def __init__(self, msg=None): - if not msg: - msg = "Creator's subset name template requires task name." - super(TaskNotSetError, self).__init__(msg) +@deprecated("openpype.pipeline.create.TaskNotSetError") +def TaskNotSetError(*args, **kwargs): + from openpype.pipeline.create import TaskNotSetError + + return TaskNotSetError(*args, **kwargs) +@deprecated("openpype.pipeline.create.get_subset_name") def get_subset_name_with_asset_doc( family, variant, @@ -109,61 +104,22 @@ def get_subset_name_with_asset_doc( dbcon (AvalonMongoDB): Mongo connection to be able query asset document if 'asset_doc' is not passed. 
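Both subset-name helpers now forward to a single openpype.pipeline.create.get_subset_name; the wrapper below preserves the argument order. A hedged sketch with hypothetical family, variant, task and asset values:

from openpype.client import get_asset_by_name
from openpype.pipeline.create import get_subset_name

project_name = "demo_project"  # hypothetical
asset_doc = get_asset_by_name(project_name, "sh040")  # hypothetical asset

subset_name = get_subset_name(
    "render",        # family
    "Main",          # variant
    "compositing",   # task name
    asset_doc,
    project_name
)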
""" - if not family: - return "" - if not host_name: - host_name = os.environ["AVALON_APP"] + from openpype.pipeline.create import get_subset_name - # Use only last part of class family value split by dot (`.`) - family = family.rsplit(".", 1)[-1] - - if project_name is None: - from openpype.pipeline import legacy_io - - project_name = legacy_io.Session["AVALON_PROJECT"] - - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - # Get settings - tools_settings = get_project_settings(project_name)["global"]["tools"] - profiles = tools_settings["creator"]["subset_name_profiles"] - filtering_criteria = { - "families": family, - "hosts": host_name, - "tasks": task_name, - "task_types": task_type - } - - matching_profile = filter_profiles(profiles, filtering_criteria) - template = None - if matching_profile: - template = matching_profile["template"] - - # Make sure template is set (matching may have empty string) - if not template: - template = default_template or DEFAULT_SUBSET_TEMPLATE - - # Simple check of task name existence for template with {task} in - # - missing task should be possible only in Standalone publisher - if not task_name and "{task" in template.lower(): - raise TaskNotSetError() - - fill_pairs = { - "variant": variant, - "family": family, - "task": task_name - } - if dynamic_data: - # Dynamic data may override default values - for key, value in dynamic_data.items(): - fill_pairs[key] = value - - return template.format(**prepare_template_data(fill_pairs)) + return get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name, + host_name, + default_template, + dynamic_data + ) +@deprecated def get_subset_name( family, variant, @@ -183,16 +139,18 @@ def get_subset_name( `get_subset_name_with_asset_doc` where asset document is expected. """ + from openpype.pipeline.create import get_subset_name + if project_name is None: project_name = dbcon.project_name asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) - return get_subset_name_with_asset_doc( + return get_subset_name( family, variant, task_name, - asset_doc or {}, + asset_doc, project_name, host_name, default_template, @@ -254,6 +212,9 @@ def filter_pyblish_plugins(plugins): Args: plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base` `discover()` method. + + Deprecated: + Function will be removed after release version 3.15.* """ from openpype.pipeline.publish.lib import filter_pyblish_plugins @@ -277,6 +238,9 @@ def set_plugin_attributes_from_settings( Value from environment `AVALON_APP` is used if not entered. project_name (str): Name of project for which settings will be loaded. Value from environment `AVALON_PROJECT` is used if not entered. + + Deprecated: + Function will be removed after release version 3.15.* """ # Function is not used anymore @@ -373,102 +337,3 @@ def source_hash(filepath, *args): time = str(os.path.getmtime(filepath)) size = str(os.path.getsize(filepath)) return "|".join([file_name, time, size] + list(args)).replace(".", ",") - - -def get_unique_layer_name(layers, name): - """ - Gets all layer names and if 'name' is present in them, increases - suffix by 1 (eg. 
creates unique layer name - for Loader) - Args: - layers (list): of strings, names only - name (string): checked value - - Returns: - (string): name_00X (without version) - """ - names = {} - for layer in layers: - layer_name = re.sub(r'_\d{3}$', '', layer) - if layer_name in names.keys(): - names[layer_name] = names[layer_name] + 1 - else: - names[layer_name] = 1 - occurrences = names.get(name, 0) - - return "{}_{:0>3d}".format(name, occurrences + 1) - - -def get_background_layers(file_url): - """ - Pulls file name from background json file, enrich with folder url for - AE to be able import files. - - Order is important, follows order in json. - - Args: - file_url (str): abs url of background json - - Returns: - (list): of abs paths to images - """ - with open(file_url) as json_file: - data = json.load(json_file) - - layers = list() - bg_folder = os.path.dirname(file_url) - for child in data['children']: - if child.get("filename"): - layers.append(os.path.join(bg_folder, child.get("filename")). - replace("\\", "/")) - else: - for layer in child['children']: - if layer.get("filename"): - layers.append(os.path.join(bg_folder, - layer.get("filename")). - replace("\\", "/")) - return layers - - -def parse_json(path): - """Parses json file at 'path' location - - Returns: - (dict) or None if unparsable - Raises: - AsssertionError if 'path' doesn't exist - """ - path = path.strip('\"') - assert os.path.isfile(path), ( - "Path to json file doesn't exist. \"{}\"".format(path) - ) - data = None - with open(path, "r") as json_file: - try: - data = json.load(json_file) - except Exception as exc: - log.error( - "Error loading json: " - "{} - Exception: {}".format(path, exc) - ) - return data - - -def get_batch_asset_task_info(ctx): - """Parses context data from webpublisher's batch metadata - - Returns: - (tuple): asset, task_name (Optional), task_type - """ - task_type = "default_task_type" - task_name = None - asset = None - - if ctx["type"] == "task": - items = ctx["path"].split('/') - asset = items[-2] - task_name = ctx["name"] - task_type = ctx["attributes"]["type"] - else: - asset = ctx["name"] - - return asset, task_name, task_type diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 60d5d3ed4a..e736ba8ef0 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -42,6 +42,28 @@ XML_CHAR_REF_REGEX_HEX = re.compile(r"&#x?[0-9a-fA-F]+;") # Regex to parse array attributes ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$") +IMAGE_EXTENSIONS = [ + ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", + ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", + ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", + ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", + ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr", + ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", + ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", + ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", + ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", + ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", + ".xpm", ".xwd" +] + +VIDEO_EXTENSIONS = [ + ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b", + ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v", + ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg", + ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb", + ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv" +] + def 
get_transcode_temp_directory(): """Creates temporary folder for transcoding.
@@ -139,7 +161,7 @@ def convert_value_by_type_name(value_type, value, logger=None): return float(value)
# Vectors will probably have more types
- if value_type == "vec2f":
+ if value_type in ("vec2f", "float2"):
return [float(item) for item in value.split(",")]
# Matrix should be always have square size of element 3x3, 4x4
@@ -154,7 +176,7 @@ def convert_value_by_type_name(value_type, value, logger=None): elif parts_len == 4: divisor = 2 elif parts_len == 9:
- divisor == 3
+ divisor = 3
elif parts_len == 16: divisor = 4 else:
@@ -204,8 +226,8 @@ def convert_value_by_type_name(value_type, value, logger=None): ) return output
- logger.info((
- "MISSING IMPLEMENTATION:"
+ logger.debug((
+ "Dev note (missing implementation):"
" Unknown attrib type \"{}\". Value: {}" ).format(value_type, value)) return value
@@ -263,8 +285,8 @@ def parse_oiio_xml_output(xml_string, logger=None): # - feel free to add more tags else: value = child.text
- logger.info((
- "MISSING IMPLEMENTATION:"
+ logger.debug((
+ "Dev note (missing implementation):"
" Unknown tag \"{}\". Value \"{}\"" ).format(tag_name, value))
diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index e5ab2872a0..099f9a34ba 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py
@@ -1,10 +1,33 @@ import os import logging import platform
+import subprocess
log = logging.getLogger("Vendor utils")
+class CachedToolPaths:
+ """Cache already used and discovered tools and their executables.
+
+ Discovering the path can take some time and can trigger subprocesses, so
+ it is better to cache the paths on first get.
+ """
+
+ _cached_paths = {}
+
+ @classmethod
+ def is_tool_cached(cls, tool):
+ return tool in cls._cached_paths
+
+ @classmethod
+ def get_executable_path(cls, tool):
+ return cls._cached_paths.get(tool)
+
+ @classmethod
+ def cache_executable_path(cls, tool, path):
+ cls._cached_paths[tool] = path
+
+
def is_file_executable(filepath): """Filepath lead to executable file.
@@ -98,6 +121,7 @@ def get_vendor_bin_path(bin_app): Returns: str: Path to vendorized binaries folder. """
+
return os.path.join( os.environ["OPENPYPE_ROOT"], "vendor",
@@ -107,6 +131,123 @@ )
+def find_tool_in_custom_paths(paths, tool, validation_func=None):
+ """Find a tool executable in custom paths.
+
+ Args:
+ paths (Iterable[str]): Iterable of paths where to look for tool.
+ tool (str): Name of tool (binary file) to find in passed paths.
+ validation_func (Function): Custom validation function of path.
+ Function must expect one argument which is path to executable.
+ If not passed, only 'find_executable' is used to identify
+ if the path is valid.
+
+ Returns:
+ Union[str, None]: Path to validated executable or None if was not
+ found.
+ """
+
+ for path in paths:
+ # Skip empty strings
+ if not path:
+ continue
+
+ # Handle cases when path is just an executable
+ # - it allows using an executable from PATH
+ # - basename must match 'tool' value (without extension)
+ extless_path, ext = os.path.splitext(path)
+ if extless_path == tool:
+ executable_path = find_executable(tool)
+ if executable_path and (
+ validation_func is None
+ or validation_func(executable_path)
+ ):
+ return executable_path
+ continue
+
+ # Normalize path because it should be a path and check if exists
+ normalized = os.path.normpath(path)
+ if not os.path.exists(normalized):
+ continue
+
+ # Note: Path can be both file and directory
+
+ # If path is a file validate it
+ if os.path.isfile(normalized):
+ basename, ext = os.path.splitext(os.path.basename(path))
+ # Check if the filename actually has the same name as 'tool'
+ if basename == tool:
+ executable_path = find_executable(normalized)
+ if executable_path and (
+ validation_func is None
+ or validation_func(executable_path)
+ ):
+ return executable_path
+
+ # Check if path is a directory and look for tool inside the dir
+ if os.path.isdir(normalized):
+ executable_path = find_executable(os.path.join(normalized, tool))
+ if executable_path and (
+ validation_func is None
+ or validation_func(executable_path)
+ ):
+ return executable_path
+ return None
+
+
+def _check_args_returncode(args):
+ try:
+ # Python 2 compatibility where DEVNULL is not available
+ if hasattr(subprocess, "DEVNULL"):
+ proc = subprocess.Popen(
+ args,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
+ proc.wait()
+ else:
+ with open(os.devnull, "w") as devnull:
+ proc = subprocess.Popen(
+ args, stdout=devnull, stderr=devnull,
+ )
+ proc.wait()
+
+ except Exception:
+ return False
+ return proc.returncode == 0
+
+
+def _oiio_executable_validation(filepath):
+ """Validate that the oiio tool executable can be executed.
+
+ Validation has 2 steps. First is using 'find_executable' to fill possible
+ missing extension or fill directory then launch executable and validate
+ that it can be executed. For that is used '--help' argument which is fast
+ and does not need any other inputs.
+
+ Any possible crash from missing libraries or an invalid build should be
+ caught.
+
+ Main reason is to validate that the executable can be run on the OS at
+ all, which can be an issue on Linux machines.
+
+ Note:
+ It does not validate if the executable is really an oiio tool which
+ should be used.
+
+ Args:
+ filepath (str): Path to executable.
+
+ Returns:
+ bool: Filepath is valid executable.
+ """
+
+ filepath = find_executable(filepath)
+ if not filepath:
+ return False
+
+ return _check_args_returncode([filepath, "--help"])
+
+
def get_oiio_tools_path(tool="oiiotool"): """Path to vendorized OpenImageIO tool executables. @@ -117,10 +258,62 @@ def get_oiio_tools_path(tool="oiiotool"): Default is "oiiotool".
""" - oiio_dir = get_vendor_bin_path("oiio") - if platform.system().lower() == "linux": - oiio_dir = os.path.join(oiio_dir, "bin") - return find_executable(os.path.join(oiio_dir, tool)) + if CachedToolPaths.is_tool_cached(tool): + return CachedToolPaths.get_executable_path(tool) + + custom_paths_str = os.environ.get("OPENPYPE_OIIO_PATHS") or "" + tool_executable_path = find_tool_in_custom_paths( + custom_paths_str.split(os.pathsep), + tool, + _oiio_executable_validation + ) + + if not tool_executable_path: + oiio_dir = get_vendor_bin_path("oiio") + if platform.system().lower() == "linux": + oiio_dir = os.path.join(oiio_dir, "bin") + default_path = os.path.join(oiio_dir, tool) + if _oiio_executable_validation(default_path): + tool_executable_path = default_path + + # Look to PATH for the tool + if not tool_executable_path: + from_path = find_executable(tool) + if from_path and _oiio_executable_validation(from_path): + tool_executable_path = from_path + + CachedToolPaths.cache_executable_path(tool, tool_executable_path) + return tool_executable_path + + +def _ffmpeg_executable_validation(filepath): + """Validate ffmpeg tool executable if can be executed. + + Validation has 2 steps. First is using 'find_executable' to fill possible + missing extension or fill directory then launch executable and validate + that it can be executed. For that is used '-version' argument which is fast + and does not need any other inputs. + + Any possible crash of missing libraries or invalid build should be catched. + + Main reason is to validate if executable can be executed on OS just running + which can be issue ob linux machines. + + Note: + It does not validate if the executable is really a ffmpeg tool. + + Args: + filepath (str): Path to executable. + + Returns: + bool: Filepath is valid executable. + """ + + filepath = find_executable(filepath) + if not filepath: + return False + + return _check_args_returncode([filepath, "-version"]) def get_ffmpeg_tool_path(tool="ffmpeg"): @@ -133,10 +326,33 @@ def get_ffmpeg_tool_path(tool="ffmpeg"): Returns: str: Full path to ffmpeg executable. 
""" - ffmpeg_dir = get_vendor_bin_path("ffmpeg") - if platform.system().lower() == "windows": - ffmpeg_dir = os.path.join(ffmpeg_dir, "bin") - return find_executable(os.path.join(ffmpeg_dir, tool)) + + if CachedToolPaths.is_tool_cached(tool): + return CachedToolPaths.get_executable_path(tool) + + custom_paths_str = os.environ.get("OPENPYPE_FFMPEG_PATHS") or "" + tool_executable_path = find_tool_in_custom_paths( + custom_paths_str.split(os.pathsep), + tool, + _ffmpeg_executable_validation + ) + + if not tool_executable_path: + ffmpeg_dir = get_vendor_bin_path("ffmpeg") + if platform.system().lower() == "windows": + ffmpeg_dir = os.path.join(ffmpeg_dir, "bin") + tool_path = find_executable(os.path.join(ffmpeg_dir, tool)) + if tool_path and _ffmpeg_executable_validation(tool_path): + tool_executable_path = tool_path + + # Look to PATH for the tool + if not tool_executable_path: + from_path = find_executable(tool) + if from_path and _oiio_executable_validation(from_path): + tool_executable_path = from_path + + CachedToolPaths.cache_executable_path(tool, tool_executable_path) + return tool_executable_path def is_oiio_supported(): diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 68b5f6c247..02e7dc13ab 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,7 +2,6 @@ from .base import ( OpenPypeModule, OpenPypeAddOn, - OpenPypeInterface, load_modules, @@ -20,7 +19,6 @@ from .base import ( __all__ = ( "OpenPypeModule", "OpenPypeAddOn", - "OpenPypeInterface", "load_modules", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e26075283d..09aea50424 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -13,7 +13,6 @@ from uuid import uuid4 from abc import ABCMeta, abstractmethod import six -import openpype from openpype.settings import ( get_system_settings, SYSTEM_SETTINGS_KEY, @@ -26,7 +25,20 @@ from openpype.settings.lib import ( get_studio_system_settings_overrides, load_json_file ) -from openpype.lib import PypeLogger + +from openpype.lib import ( + Logger, + import_filepath, + import_module_from_dirpath +) + +from .interfaces import ( + OpenPypeInterface, + IPluginPaths, + IHostAddon, + ITrayModule, + ITrayService +) # Files that will be always ignored on modules import IGNORED_FILENAMES = ( @@ -93,7 +105,7 @@ class _ModuleClass(object): def log(self): if self._log is None: super(_ModuleClass, self).__setattr__( - "_log", PypeLogger.get_logger(self.name) + "_log", Logger.get_logger(self.name) ) return self._log @@ -278,19 +290,13 @@ def load_modules(force=False): def _load_modules(): - # Import helper functions from lib - from openpype.lib import ( - import_filepath, - import_module_from_dirpath - ) - # Key under which will be modules imported in `sys.modules` modules_key = "openpype_modules" # Change `sys.modules` sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) - log = PypeLogger.get_logger("ModulesLoader") + log = Logger.get_logger("ModulesLoader") # Look for OpenPype modules in paths defined with `get_module_dirs` # - dynamically imported OpenPype modules and addons @@ -391,31 +397,6 @@ def _load_modules(): log.error(msg, exc_info=True) -class _OpenPypeInterfaceMeta(ABCMeta): - """OpenPypeInterface meta class to print proper string.""" - - def __str__(self): - return "<'OpenPypeInterface.{}'>".format(self.__name__) - - def __repr__(self): - return str(self) - - -@six.add_metaclass(_OpenPypeInterfaceMeta) -class OpenPypeInterface: - """Base class of Interface that can be used 
as Mixin with abstract parts. - - This is way how OpenPype module or addon can tell that has implementation - for specific part or for other module/addon. - - Child classes of OpenPypeInterface may be used as mixin in different - OpenPype modules which means they have to have implemented methods defined - in the interface. By default interface does not have any abstract parts. - """ - - pass - - @six.add_metaclass(ABCMeta) class OpenPypeModule: """Base class of pype module. @@ -440,7 +421,7 @@ class OpenPypeModule: def __init__(self, manager, settings): self.manager = manager - self.log = PypeLogger.get_logger(self.name) + self.log = Logger.get_logger(self.name) self.initialize(settings) @@ -562,6 +543,40 @@ class ModulesManager: self.initialize_modules() self.connect_modules() + def __getitem__(self, module_name): + return self.modules_by_name[module_name] + + def get(self, module_name, default=None): + """Access module by name. + + Args: + module_name (str): Name of module which should be returned. + default (Any): Default output if module is not available. + + Returns: + Union[OpenPypeModule, None]: Module found by name or None. + """ + return self.modules_by_name.get(module_name, default) + + def get_enabled_module(self, module_name, default=None): + """Fast access to enabled module. + + If module is available but is not enabled default value is returned. + + Args: + module_name (str): Name of module which should be returned. + default (Any): Default output if module is not available or is + not enabled. + + Returns: + Union[OpenPypeModule, None]: Enabled module found by name or None. + """ + + module = self.get(module_name) + if module is not None and module.enabled: + return module + return default + def initialize_modules(self): """Import and initialize modules.""" # Make sure modules are loaded @@ -715,8 +730,6 @@ class ModulesManager: and "actions" each containing list of paths. """ # Output structure - from openpype_interfaces import IPluginPaths - output = { "publish": [], "create": [], @@ -773,8 +786,6 @@ class ModulesManager: list: List of creator plugin paths. """ # Output structure - from openpype_interfaces import IPluginPaths - output = [] for module in self.get_enabled_modules(): # Skip module that do not inherit from `IPluginPaths` @@ -789,68 +800,6 @@ class ModulesManager: output.extend(paths) return output - def collect_launch_hook_paths(self, app): - """Helper to collect application launch hooks. - - It used to be based on 'ILaunchHookPaths' which is not true anymore. - Module just have to have implemented 'get_launch_hook_paths' method. - - Args: - app (Application): Application object which can be used for - filtering of which launch hook paths are returned. - - Returns: - list: Paths to launch hook directories. - """ - - str_type = type("") - expected_types = (list, tuple, set) - - output = [] - for module in self.get_enabled_modules(): - # Skip module if does not have implemented 'get_launch_hook_paths' - func = getattr(module, "get_launch_hook_paths", None) - if func is None: - continue - - func = module.get_launch_hook_paths - if hasattr(inspect, "signature"): - sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 - else: - expect_args = len(inspect.getargspec(func)[0]) > 0 - - # Pass application argument if method expect it. 
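
The dict-like accessors added to ModulesManager above make module lookup explicit about enablement. A minimal usage sketch; the module names are illustrative and depend on what is deployed in the studio:

```python
from openpype.modules import ModulesManager

manager = ModulesManager()

# Dict-like access raises KeyError for unknown module names
deadline_module = manager["deadline"]

# 'get' returns a default instead of raising
ftrack_module = manager.get("ftrack")

# 'get_enabled_module' additionally returns the default when the module
# exists but is disabled in the studio settings
sync_server = manager.get_enabled_module("sync_server")
if sync_server is None:
    print("Site sync is not available or not enabled")
```
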
- try: - if expect_args: - hook_paths = func(app) - else: - hook_paths = func() - except Exception: - self.log.warning( - "Failed to call 'get_launch_hook_paths'", - exc_info=True - ) - continue - - if not hook_paths: - continue - - # Convert string to list - if isinstance(hook_paths, str_type): - hook_paths = [hook_paths] - - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output - def get_host_module(self, host_name): """Find host module by host name. @@ -859,15 +808,13 @@ class ModulesManager: Returns: OpenPypeModule: Found host module by name. - None: There was not found module inheriting IHostModule which has + None: There was not found module inheriting IHostAddon which has host name set to passed 'host_name'. """ - from openpype_interfaces import IHostModule - for module in self.get_enabled_modules(): if ( - isinstance(module, IHostModule) + isinstance(module, IHostAddon) and module.host_name == host_name ): return module @@ -878,15 +825,13 @@ class ModulesManager: Returns: Iterable[str]: All available host names based on enabled modules - inheriting 'IHostModule'. + inheriting 'IHostAddon'. """ - from openpype_interfaces import IHostModule - host_names = { module.host_name for module in self.get_enabled_modules() - if isinstance(module, IHostModule) + if isinstance(module, IHostAddon) } return host_names @@ -1025,7 +970,7 @@ class TrayModulesManager(ModulesManager): ) def __init__(self): - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.modules = [] self.modules_by_id = {} @@ -1064,8 +1009,6 @@ class TrayModulesManager(ModulesManager): self.tray_menu(tray_menu) def get_enabled_tray_modules(self): - from openpype_interfaces import ITrayModule - output = [] for module in self.modules: if module.enabled and isinstance(module, ITrayModule): @@ -1141,8 +1084,6 @@ class TrayModulesManager(ModulesManager): self._report["Tray menu"] = report def start_modules(self): - from openpype_interfaces import ITrayService - report = {} time_start = time.time() prev_start_time = time_start @@ -1201,7 +1142,7 @@ def get_module_settings_defs(): settings_defs = [] - log = PypeLogger.get_logger("ModuleSettingsLoad") + log = Logger.get_logger("ModuleSettingsLoad") for raw_module in openpype_modules: for attr_name in dir(raw_module): diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 0bad981fdf..512ff800ee 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -9,6 +9,7 @@ import os from abc import abstractmethod import platform import getpass +from functools import partial from collections import OrderedDict import six @@ -66,6 +67,96 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). 
+
+ Example:
+ >>> var = DeadlineKeyValueVar("EnvironmentKeyValue")
+ >>> var["my_var"] = "hello"
+ >>> var["my_other_var"] = "hello2"
+ >>> var.serialize()
+ {'EnvironmentKeyValue0': 'my_other_var=hello2',
+ 'EnvironmentKeyValue1': 'my_var=hello'}
+
+ """
+ def __init__(self, key):
+ super(DeadlineKeyValueVar, self).__init__()
+ self.__key = key
+
+ def serialize(self):
+ key = self.__key
+
+ # Allow custom location for index in serialized string
+ if "{}" not in key:
+ key = key + "{}"
+
+ return {
+ key.format(index): "{}={}".format(var_key, var_value)
+ for index, (var_key, var_value) in enumerate(sorted(self.items()))
+ }
+
+
+class DeadlineIndexedVar(dict):
+ """
+
+ Allows setting and querying values by integer indices:
+ Query: var[1] or var.get(1)
+ Set: var[1] = "my_value"
+ Append: var += "value"
+
+ Note: Iterating the instance is not guaranteed to follow the order of
+ the indices. To iterate in order, use `sorted()`
+
+ """
+ def __init__(self, key):
+ super(DeadlineIndexedVar, self).__init__()
+ self.__key = key
+
+ def serialize(self):
+ key = self.__key
+
+ # Allow custom location for index in serialized string
+ if "{}" not in key:
+ key = key + "{}"
+
+ return {
+ key.format(index): value for index, value in sorted(self.items())
+ }
+
+ def next_available_index(self):
+ # Add as first unused entry
+ i = 0
+ while i in self.keys():
+ i += 1
+ return i
+
+ def update(self, data):
+ # Force the integer key check
+ for key, value in data.items():
+ self.__setitem__(key, value)
+
+ def __iadd__(self, other):
+ index = self.next_available_index()
+ self[index] = other
+ return self
+
+ def __setitem__(self, key, value):
+ if not isinstance(key, int):
+ raise TypeError("Key must be an integer: {}".format(key))
+
+ if key < 0:
+ raise ValueError("Negative index can't be set: {}".format(key))
+ dict.__setitem__(self, key, value)
+
+
@attr.s class DeadlineJobInfo(object): """Mapping of all Deadline *JobInfo* attributes.
@@ -218,24 +309,8 @@ class DeadlineJobInfo(object): # Environment # ----------------------------------------------
- _environmentKeyValue = attr.ib(factory=list)
-
- @property
- def EnvironmentKeyValue(self): # noqa: N802
- """Return all environment key values formatted for Deadline.
-
- Returns:
- dict: as `{'EnvironmentKeyValue0', 'key=value'}`
-
- """
- out = {}
- for index, v in enumerate(self._environmentKeyValue):
- out["EnvironmentKeyValue{}".format(index)] = v
- return out
-
- @EnvironmentKeyValue.setter
- def EnvironmentKeyValue(self, val): # noqa: N802
- self._environmentKeyValue.append(val)
+ EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
+ "EnvironmentKeyValue"))
IncludeEnvironment = attr.ib(default=None) # Default: false UseJobEnvironmentOnly = attr.ib(default=None) # Default: false
@@ -243,121 +318,29 @@ class DeadlineJobInfo(object): # Job Extra Info # ----------------------------------------------
- _extraInfos = attr.ib(factory=list)
- _extraInfoKeyValues = attr.ib(factory=list)
-
- @property
- def ExtraInfo(self): # noqa: N802
- """Return all ExtraInfo values formatted for Deadline.
-
- Returns:
- dict: as `{'ExtraInfo0': 'value'}`
-
- """
- out = {}
- for index, v in enumerate(self._extraInfos):
- out["ExtraInfo{}".format(index)] = v
- return out
-
- @ExtraInfo.setter
- def ExtraInfo(self, val): # noqa: N802
- self._extraInfos.append(val)
-
- @property
- def ExtraInfoKeyValue(self): # noqa: N802
- """Return all ExtraInfoKeyValue values formatted for Deadline.
- - Returns: - dict: as {'ExtraInfoKeyValue0': 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfoKeyValues): - out["ExtraInfoKeyValue{}".format(index)] = v - return out - - @ExtraInfoKeyValue.setter - def ExtraInfoKeyValue(self, val): # noqa: N802 - self._extraInfoKeyValues.append(val) + ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, + "ExtraInfoKeyValue")) # Task Extra Info Names # ---------------------------------------------- OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - _taskExtraInfos = attr.ib(factory=list) - - @property - def TaskExtraInfoName(self): # noqa: N802 - """Return all TaskExtraInfoName values formatted for Deadline. - - Returns: - dict: as `{'TaskExtraInfoName0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._taskExtraInfos): - out["TaskExtraInfoName{}".format(index)] = v - return out - - @TaskExtraInfoName.setter - def TaskExtraInfoName(self, val): # noqa: N802 - self._taskExtraInfos.append(val) + TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, + "TaskExtraInfoName")) # Output # ---------------------------------------------- - _outputFilename = attr.ib(factory=list) - _outputFilenameTile = attr.ib(factory=list) - _outputDirectory = attr.ib(factory=list) + OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename")) + OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename{}Tile")) + OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputDirectory")) - @property - def OutputFilename(self): # noqa: N802 - """Return all OutputFilename values formatted for Deadline. - - Returns: - dict: as `{'OutputFilename0': 'filename'}` - - """ - out = {} - for index, v in enumerate(self._outputFilename): - out["OutputFilename{}".format(index)] = v - return out - - @OutputFilename.setter - def OutputFilename(self, val): # noqa: N802 - self._outputFilename.append(val) - - @property - def OutputFilenameTile(self): # noqa: N802 - """Return all OutputFilename#Tile values formatted for Deadline. - - Returns: - dict: as `{'OutputFilenme#Tile': 'tile'}` - - """ - out = {} - for index, v in enumerate(self._outputFilenameTile): - out["OutputFilename{}Tile".format(index)] = v - return out - - @OutputFilenameTile.setter - def OutputFilenameTile(self, val): # noqa: N802 - self._outputFilenameTile.append(val) - - @property - def OutputDirectory(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. 
- - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._outputDirectory): - out["OutputDirectory{}".format(index)] = v - return out - - @OutputDirectory.setter - def OutputDirectory(self, val): # noqa: N802 - self._outputDirectory.append(val) + # Asset Dependency + # ---------------------------------------------- + AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, + "AssetDependency")) # Tile Job # ---------------------------------------------- @@ -381,7 +364,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if a.name.startswith("_"): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): return False if v is None: return False @@ -389,15 +372,27 @@ class DeadlineJobInfo(object): serialized = attr.asdict( self, dict_factory=OrderedDict, filter=filter_data) - serialized.update(self.EnvironmentKeyValue) - serialized.update(self.ExtraInfo) - serialized.update(self.ExtraInfoKeyValue) - serialized.update(self.TaskExtraInfoName) - serialized.update(self.OutputFilename) - serialized.update(self.OutputFilenameTile) - serialized.update(self.OutputDirectory) + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + return serialized + def update(self, data): + """Update instance with data dict""" + for key, value in data.items(): + setattr(self, key, value) + @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): @@ -521,68 +516,72 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): published. """ - anatomy = self._instance.context.data['anatomy'] - file_path = None - for i in self._instance.context: - if "workfile" in i.data["families"] \ - or i.data["family"] == "workfile": - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. - template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("ext") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - file_path)) + instance = self._instance + workfile_instance = self._get_workfile_instance(instance.context) + if workfile_instance is None: + return - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + # determine published path from Anatomy. 
+ template_data = workfile_instance.data.get("anatomyData")
+ rep = workfile_instance.data.get("representations")[0]
+ template_data["representation"] = rep.get("name")
+ template_data["ext"] = rep.get("ext")
+ template_data["comment"] = None
- if not replace_in_path:
- return file_path
+ anatomy = instance.context.data['anatomy']
+ anatomy_filled = anatomy.format(template_data)
+ template_filled = anatomy_filled["publish"]["path"]
+ file_path = os.path.normpath(template_filled)
- # now we need to switch scene in expected files
- # because token will now point to published
- # scene file and that might differ from current one
- new_scene = os.path.splitext(
- os.path.basename(file_path))[0]
- orig_scene = os.path.splitext(
- os.path.basename(
- self._instance.context.data["currentFile"]))[0]
- exp = self._instance.data.get("expectedFiles")
+ self.log.info("Using published scene for render {}".format(file_path))
- if isinstance(exp[0], dict):
- # we have aovs and we need to iterate over them
- new_exp = {}
- for aov, files in exp[0].items():
- replaced_files = []
- for f in files:
- replaced_files.append(
- str(f).replace(orig_scene, new_scene)
- )
- new_exp[aov] = replaced_files
- # [] might be too much here, TODO
- self._instance.data["expectedFiles"] = [new_exp]
- else:
- new_exp = []
- for f in exp:
- new_exp.append(
- str(f).replace(orig_scene, new_scene)
- )
- self._instance.data["expectedFiles"] = new_exp
+ if not os.path.exists(file_path):
+ self.log.error("published scene does not exist!")
+ raise RuntimeError(
+ "Published scene does not exist: {}".format(file_path))
- self.log.info("Scene name was switched {} -> {}".format(
- orig_scene, new_scene
- ))
+ if not replace_in_path:
+ return file_path
+
+ # now we need to switch scene in expected files
+ # because token will now point to published
+ # scene file and that might differ from current one
+ def _clean_name(path):
+ return os.path.splitext(os.path.basename(path))[0]
+
+ new_scene = _clean_name(file_path)
+ orig_scene = _clean_name(instance.context.data["currentFile"])
+ expected_files = instance.data.get("expectedFiles")
+
+ if isinstance(expected_files[0], dict):
+ # we have aovs and we need to iterate over them
+ new_exp = {}
+ for aov, files in expected_files[0].items():
+ replaced_files = []
+ for f in files:
+ replaced_files.append(
+ str(f).replace(orig_scene, new_scene)
+ )
+ new_exp[aov] = replaced_files
+ # [] might be too much here, TODO
+ instance.data["expectedFiles"] = [new_exp]
+ else:
+ new_exp = []
+ for f in expected_files:
+ new_exp.append(
+ str(f).replace(orig_scene, new_scene)
+ )
+ instance.data["expectedFiles"] = new_exp
+
+ metadata_folder = instance.data.get("publishRenderMetadataFolder")
+ if metadata_folder:
+ metadata_folder = metadata_folder.replace(orig_scene,
+ new_scene)
+ instance.data["publishRenderMetadataFolder"] = metadata_folder
+
+ self.log.info("Scene name was switched {} -> {}".format(
+ orig_scene, new_scene
+ ))
return file_path
@@ -645,3 +644,22 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self._instance.data["deadlineSubmissionJob"] = result return result["_id"]
+
+ @staticmethod
+ def _get_workfile_instance(context):
+ """Find workfile instance in context"""
+ for i in context:
+
+ is_workfile = (
+ "workfile" in i.data.get("families", []) or
+ i.data["family"] == "workfile"
+ )
+ if not is_workfile:
+ continue
+
+ # test if there is instance of workfile waiting
+ # to be published.
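
To make the scene-name switch above concrete, here is a small self-contained sketch of the same substitution with illustrative paths; the real values come from Anatomy and the render collector:

```python
import os

def _clean_name(path):
    # Same helper as above: file name without directory and extension
    return os.path.splitext(os.path.basename(path))[0]

# The work file carries a subversion string that the published file strips
orig_scene = _clean_name("/work/shot010_v006_fixLights.ma")  # hypothetical
new_scene = _clean_name("/publish/shot010_v006.ma")          # hypothetical

expected_files = ["/renders/shot010_v006_fixLights/beauty.0001.exr"]
renamed = [str(f).replace(orig_scene, new_scene) for f in expected_files]
# ['/renders/shot010_v006/beauty.0001.exr']
```
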
+ assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + + return i diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index c30db75188..bbd0f74e8a 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -3,7 +3,7 @@ import requests import six import sys -from openpype.lib import requests_get, PypeLogger +from openpype.lib import requests_get, Logger from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths @@ -58,7 +58,7 @@ class DeadlineModule(OpenPypeModule, IPluginPaths): """ if not log: - log = PypeLogger.get_logger(__name__) + log = Logger.get_logger(__name__) argument = "{}/api/pools?NamesOnly=true".format(webservice) try: diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index a7035cd99f..9981bead3e 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -13,7 +13,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.415 label = "Deadline Webservice from the Instance" - families = ["rendering"] + families = ["rendering", "renderlayer"] def process(self, instance): instance.data["deadlineUrl"] = self._collect_deadline_url(instance) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index c55f85c8da..0c1ffa6bd7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -3,8 +3,10 @@ import attr import getpass import pyblish.api -from openpype.lib import env_value_to_bool -from openpype.lib.delivery import collect_frames +from openpype.lib import ( + env_value_to_bool, + collect_frames, +) from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -65,9 +67,9 @@ class AfterEffectsSubmitDeadline( dln_job_info.Group = self.group dln_job_info.Department = self.department dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename = \ + dln_job_info.OutputFilename += \ os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory = \ + dln_job_info.OutputDirectory += \ os.path.dirname(self._instance.data["expectedFiles"][0]) dln_job_info.JobDelay = "00:00:00" @@ -90,13 +92,12 @@ class AfterEffectsSubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - dln_job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + dln_job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off - dln_job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + dln_job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return dln_job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 3f9c09b592..6327143623 100644 --- 
a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -284,14 +284,12 @@ class HarmonySubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + job_info.EnvironmentKeyValue[key] = value # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7966861358..3398e1725e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -18,7 +18,6 @@ Attributes: from __future__ import print_function import os -import json import getpass import copy import re @@ -27,45 +26,723 @@ from datetime import datetime import itertools from collections import OrderedDict -import clique -import requests +import attr from maya import cmds -import pyblish.api - -from openpype.lib import requests_post -from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io -# Documentation for keys available at: -# https://docs.thinkboxsoftware.com -# /products/deadline/8.0/1_User%20Manual/manual -# /manual-submission.html#job-info-file-options +from openpype.hosts.maya.api.lib_rendersettings import RenderSettings +from openpype.hosts.maya.api.lib import get_attr_in_layer -payload_skeleton_template = { - "JobInfo": { - "BatchName": None, # Top-level group name - "Name": None, # Job name, as seen in Monitor - "UserName": None, - "Plugin": "MayaBatch", - "Frames": "{start}-{end}x{step}", - "Comment": None, - "Priority": 50, - }, - "PluginInfo": { - "SceneFile": None, # Input - "OutputFilePath": None, # Output directory and filename - "OutputFilePrefix": None, - "Version": cmds.about(version=True), # Mandatory for Deadline - "UsingRenderLayers": True, - "RenderLayer": None, # Render only this layer - "Renderer": None, - "ProjectPath": None, # Resolve relative references - "RenderSetupIncludeLights": None, # Include all lights flag. 
- },
- "AuxFiles": [] # Mandatory for Deadline, may be empty
-}
+from openpype_modules.deadline import abstract_submit_deadline
+from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
+
+
+def _validate_deadline_bool_value(instance, attribute, value):
+ if not isinstance(value, (str, bool)):
+ raise TypeError(
+ "Attribute {} must be str or bool.".format(attribute))
+ if value not in {"1", "0", True, False}:
+ raise ValueError(
+ ("Value of {} must be one of "
+ "'0', '1', True, False").format(attribute)
+ )
+
+
+@attr.s
+class MayaPluginInfo(object):
+ SceneFile = attr.ib(default=None) # Input
+ OutputFilePath = attr.ib(default=None) # Output directory and filename
+ OutputFilePrefix = attr.ib(default=None)
+ Version = attr.ib(default=None) # Mandatory for Deadline
+ UsingRenderLayers = attr.ib(default=True)
+ RenderLayer = attr.ib(default=None) # Render only this layer
+ Renderer = attr.ib(default=None)
+ ProjectPath = attr.ib(default=None) # Resolve relative references
+ # Include all lights flag
+ RenderSetupIncludeLights = attr.ib(
+ default="1", validator=_validate_deadline_bool_value)
+
+
+@attr.s
+class PythonPluginInfo(object):
+ ScriptFile = attr.ib()
+ Version = attr.ib(default="3.6")
+ Arguments = attr.ib(default=None)
+ SingleFrameOnly = attr.ib(default=None)
+
+
+@attr.s
+class VRayPluginInfo(object):
+ InputFilename = attr.ib(default=None) # Input
+ SeparateFilesPerFrame = attr.ib(default=None)
+ VRayEngine = attr.ib(default="V-Ray")
+ Width = attr.ib(default=None)
+ Height = attr.ib(default=None) # Mandatory for Deadline
+ OutputFilePath = attr.ib(default=None) # Output directory
+ OutputFileName = attr.ib(default=None) # Output file name
+
+
+@attr.s
+class ArnoldPluginInfo(object):
+ ArnoldFile = attr.ib(default=None)
+
+
+class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
+
+ label = "Submit Render to Deadline"
+ hosts = ["maya"]
+ families = ["renderlayer"]
+ targets = ["local"]
+
+ tile_assembler_plugin = "OpenPypeTileAssembler"
+ priority = 50
+ tile_priority = 50
+ limit = [] # limit groups
+ jobInfo = {}
+ pluginInfo = {}
+ group = "none"
+
+ def get_job_info(self):
+ job_info = DeadlineJobInfo(Plugin="MayaBatch")
+
+ # todo: test whether this works for existing production cases
+ # where custom jobInfo was stored in the project settings
+ job_info.update(self.jobInfo)
+
+ instance = self._instance
+ context = instance.context
+
+ # Always use the original work file name for the Job name even when
+ # rendering is done from the published Work File. The original work
+ # file name is clearer because it can also have subversion strings,
+ # etc. which are stripped for the published file.
+ src_filepath = context.data["currentFile"] + src_filename = os.path.basename(src_filepath) + + job_info.Name = "%s - %s" % (src_filename, instance.name) + job_info.BatchName = src_filename + job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") + job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) + + # Deadline requires integers in frame range + frames = "{start}-{end}x{step}".format( + start=int(instance.data["frameStartHandle"]), + end=int(instance.data["frameEndHandle"]), + step=int(instance.data["byFrameStep"]), + ) + job_info.Frames = frames + + job_info.Pool = instance.data.get("primaryPool") + job_info.SecondaryPool = instance.data.get("secondaryPool") + job_info.ChunkSize = instance.data.get("chunkSize", 10) + job_info.Comment = context.data.get("comment") + job_info.Priority = instance.data.get("priority", self.priority) + job_info.FramesPerTask = instance.data.get("framesPerTask", 1) + + if self.group != "none" and self.group: + job_info.Group = self.group + + if self.limit: + job_info.LimitGroups = ",".join(self.limit) + + # Add options from RenderGlobals + render_globals = instance.data.get("renderGlobals", {}) + job_info.update(render_globals) + + keys = [ + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "OPENPYPE_SG_USER", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "OPENPYPE_DEV", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if self._instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session) + + for key in keys: + value = environment.get(key) + if not value: + continue + job_info.EnvironmentKeyValue[key] = value + + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" + job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1" + + # Adding file dependencies. + if self.asset_dependencies: + dependencies = instance.context.data["fileDependencies"] + dependencies.append(context.data["currentFile"]) + for dependency in dependencies: + job_info.AssetDependency += dependency + + # Add list of expected files to job + # --------------------------------- + exp = instance.data.get("expectedFiles") + for filepath in self._iter_expected_files(exp): + job_info.OutputDirectory += os.path.dirname(filepath) + job_info.OutputFilename += os.path.basename(filepath) + + return job_info + + def get_plugin_info(self): + + instance = self._instance + context = instance.context + + # Set it to default Maya behaviour if it cannot be determined + # from instance (but it should be, by the Collector). 
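
The environment loop above uses the new serializable containers instead of the removed property setters. A quick sketch of how they behave; the values are illustrative:

```python
from openpype_modules.deadline.abstract_submit_deadline import (
    DeadlineKeyValueVar,
    DeadlineIndexedVar,
)

env = DeadlineKeyValueVar("EnvironmentKeyValue")
env["AVALON_PROJECT"] = "demo_project"
env["OPENPYPE_RENDER_JOB"] = "1"
print(env.serialize())
# {'EnvironmentKeyValue0': 'AVALON_PROJECT=demo_project',
#  'EnvironmentKeyValue1': 'OPENPYPE_RENDER_JOB=1'}

outputs = DeadlineIndexedVar("OutputFilename")
outputs += "beauty.0001.exr"     # '+=' appends at the next free index
outputs[1] = "diffuse.0001.exr"  # explicit integer indices also work
print(outputs.serialize())
# {'OutputFilename0': 'beauty.0001.exr',
#  'OutputFilename1': 'diffuse.0001.exr'}
```
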
+ + default_rs_include_lights = ( + instance.context.data['project_settings'] + ['maya'] + ['RenderSettings'] + ['enable_all_lights'] + ) + + rs_include_lights = instance.data.get( + "renderSetupIncludeLights", default_rs_include_lights) + if rs_include_lights not in {"1", "0", True, False}: + rs_include_lights = default_rs_include_lights + plugin_info = MayaPluginInfo( + SceneFile=self.scene_path, + Version=cmds.about(version=True), + RenderLayer=instance.data['setMembers'], + Renderer=instance.data["renderer"], + RenderSetupIncludeLights=rs_include_lights, # noqa + ProjectPath=context.data["workspaceDir"], + UsingRenderLayers=True, + ) + + plugin_payload = attr.asdict(plugin_info) + + # Patching with pluginInfo from settings + for key, value in self.pluginInfo.items(): + plugin_payload[key] = value + + return plugin_payload + + def process_submission(self): + + instance = self._instance + context = instance.context + + filepath = self.scene_path # publish if `use_publish` else workfile + + # TODO: Avoid the need for this logic here, needed for submit publish + # Store output dir for unified publisher (filesequence) + expected_files = instance.data["expectedFiles"] + first_file = next(self._iter_expected_files(expected_files)) + output_dir = os.path.dirname(first_file) + instance.data["outputDir"] = output_dir + instance.data["toBeRenderedOn"] = "deadline" + + # Patch workfile (only when use_published is enabled) + if self.use_published: + self._patch_workfile() + + # Gather needed data ------------------------------------------------ + workspace = context.data["workspaceDir"] + default_render_file = instance.context.data.get('project_settings')\ + .get('maya')\ + .get('RenderSettings')\ + .get('default_render_image_folder') + filename = os.path.basename(filepath) + dirname = os.path.join(workspace, default_render_file) + + # Fill in common data to payload ------------------------------------ + # TODO: Replace these with collected data from CollectRender + payload_data = { + "filename": filename, + "dirname": dirname, + } + + # Submit preceding export jobs ------------------------------------- + export_job = None + assert not all(x in instance.data["families"] + for x in ['vrayscene', 'assscene']), ( + "Vray Scene and Ass Scene options are mutually exclusive") + + if "vrayscene" in instance.data["families"]: + self.log.debug("Submitting V-Ray scene render..") + vray_export_payload = self._get_vray_export_payload(payload_data) + export_job = self.submit(vray_export_payload) + + payload = self._get_vray_render_payload(payload_data) + + elif "assscene" in instance.data["families"]: + self.log.debug("Submitting Arnold .ass standalone render..") + ass_export_payload = self._get_arnold_export_payload(payload_data) + export_job = self.submit(ass_export_payload) + + payload = self._get_arnold_render_payload(payload_data) + else: + self.log.debug("Submitting MayaBatch render..") + payload = self._get_maya_payload(payload_data) + + # Add export job as dependency -------------------------------------- + if export_job: + job_info, _ = payload + job_info.JobDependency = export_job + + if instance.data.get("tileRendering"): + # Prepare tiles data + self._tile_render(payload) + else: + # Submit main render job + job_info, plugin_info = payload + self.submit(self.assemble_payload(job_info, plugin_info)) + + def _tile_render(self, payload): + """Submit as tile render per frame with dependent assembly jobs.""" + + # As collected by super process() + instance = self._instance + + payload_job_info, 
payload_plugin_info = payload
+ job_info = copy.deepcopy(payload_job_info)
+ plugin_info = copy.deepcopy(payload_plugin_info)
+
+ # if we have sequence of files, we need to create tile job for
+ # every frame
+ job_info.TileJob = True
+ job_info.TileJobTilesInX = instance.data.get("tilesX")
+ job_info.TileJobTilesInY = instance.data.get("tilesY")
+
+ tiles_count = job_info.TileJobTilesInX * job_info.TileJobTilesInY
+
+ plugin_info["ImageHeight"] = instance.data.get("resolutionHeight")
+ plugin_info["ImageWidth"] = instance.data.get("resolutionWidth")
+ plugin_info["RegionRendering"] = True
+
+ R_FRAME_NUMBER = re.compile(
+ r".+\.(?P<frame>[0-9]+)\..+") # noqa: N806, E501
+ REPL_FRAME_NUMBER = re.compile(
+ r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501
+
+ exp = instance.data["expectedFiles"]
+ if isinstance(exp[0], dict):
+ # we have aovs and we need to iterate over them
+ # get files from `beauty`
+ files = exp[0].get("beauty")
+ # assembly files are used for assembly jobs as we need to put
+ # together all AOVs
+ assembly_files = list(
+ itertools.chain.from_iterable(
+ [f for _, f in exp[0].items()]))
+ if not files:
+ # if beauty doesn't exist, use first aov we found
+ files = exp[0].get(list(exp[0].keys())[0])
+ else:
+ files = exp
+ assembly_files = files
+
+ # Define frame tile jobs
+ frame_file_hash = {}
+ frame_payloads = {}
+ file_index = 1
+ for file in files:
+ frame = re.search(R_FRAME_NUMBER, file).group("frame")
+
+ new_job_info = copy.deepcopy(job_info)
+ new_job_info.Name += " (Frame {} - {} tiles)".format(frame,
+ tiles_count)
+ new_job_info.TileJobFrame = frame
+
+ new_plugin_info = copy.deepcopy(plugin_info)
+
+ # Add tile data into job info and plugin info
+ tiles_data = _format_tiles(
+ file, 0,
+ instance.data.get("tilesX"),
+ instance.data.get("tilesY"),
+ instance.data.get("resolutionWidth"),
+ instance.data.get("resolutionHeight"),
+ payload_plugin_info["OutputFilePrefix"]
+ )[0]
+
+ new_job_info.update(tiles_data["JobInfo"])
+ new_plugin_info.update(tiles_data["PluginInfo"])
+
+ self.log.info("hashing {} - {}".format(file_index, file))
+ job_hash = hashlib.sha256(
+ ("{}_{}".format(file_index, file)).encode("utf-8"))
+
+ file_hash = job_hash.hexdigest()
+ frame_file_hash[frame] = file_hash
+
+ new_job_info.ExtraInfo[0] = file_hash
+ new_job_info.ExtraInfo[1] = file
+
+ frame_payloads[frame] = self.assemble_payload(
+ job_info=new_job_info,
+ plugin_info=new_plugin_info
+ )
+ file_index += 1
+
+ self.log.info(
+ "Submitting tile job(s) [{}] ...".format(len(frame_payloads)))
+
+ # Submit frame tile jobs
+ frame_tile_job_id = {}
+ for frame, tile_job_payload in frame_payloads.items():
+ job_id = self.submit(tile_job_payload)
+ frame_tile_job_id[frame] = job_id
+
+ # Define assembly payloads
+ assembly_job_info = copy.deepcopy(job_info)
+ assembly_job_info.Plugin = self.tile_assembler_plugin
+ assembly_job_info.Name += " - Tile Assembly Job"
+ assembly_job_info.Frames = 1
+ assembly_job_info.MachineLimit = 1
+ assembly_job_info.Priority = instance.data.get("tile_priority",
+ self.tile_priority)
+
+ assembly_plugin_info = {
+ "CleanupTiles": 1,
+ "ErrorOnMissing": True,
+ "Renderer": self._instance.data["renderer"]
+ }
+
+ assembly_payloads = []
+ output_dir = self.job_info.OutputDirectory[0]
+ for file in assembly_files:
+ frame = re.search(R_FRAME_NUMBER, file).group("frame")
+
+ frame_assembly_job_info = copy.deepcopy(assembly_job_info)
+ frame_assembly_job_info.Name += " (Frame {})".format(frame)
+ frame_assembly_job_info.OutputFilename[0] = re.sub(
+ REPL_FRAME_NUMBER,
"\\1{}\\3".format("#" * len(frame)), file) + + file_hash = frame_file_hash[frame] + tile_job_id = frame_tile_job_id[frame] + + frame_assembly_job_info.ExtraInfo[0] = file_hash + frame_assembly_job_info.ExtraInfo[1] = file + frame_assembly_job_info.JobDependency = tile_job_id + + # write assembly job config files + now = datetime.now() + + config_file = os.path.join( + output_dir, + "{}_config_{}.txt".format( + os.path.splitext(file)[0], + now.strftime("%Y_%m_%d_%H_%M_%S") + ) + ) + try: + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + except OSError: + # directory is not available + self.log.warning("Path is unreachable: " + "`{}`".format(output_dir)) + + with open(config_file, "w") as cf: + print("TileCount={}".format(tiles_count), file=cf) + print("ImageFileName={}".format(file), file=cf) + print("ImageWidth={}".format( + instance.data.get("resolutionWidth")), file=cf) + print("ImageHeight={}".format( + instance.data.get("resolutionHeight")), file=cf) + + tiles = _format_tiles( + file, 0, + instance.data.get("tilesX"), + instance.data.get("tilesY"), + instance.data.get("resolutionWidth"), + instance.data.get("resolutionHeight"), + payload_plugin_info["OutputFilePrefix"] + )[1] + for k, v in sorted(tiles.items()): + print("{}={}".format(k, v), file=cf) + + payload = self.assemble_payload( + job_info=frame_assembly_job_info, + plugin_info=assembly_plugin_info.copy(), + # todo: aux file transfers don't work with deadline webservice + # add config file as job auxFile + # aux_files=[config_file] + ) + assembly_payloads.append(payload) + + # Submit assembly jobs + assembly_job_ids = [] + num_assemblies = len(assembly_payloads) + for i, payload in enumerate(assembly_payloads): + self.log.info( + "submitting assembly job {} of {}".format(i + 1, + num_assemblies) + ) + assembly_job_id = self.submit(payload) + assembly_job_ids.append(assembly_job_id) + + instance.data["assemblySubmissionJobs"] = assembly_job_ids + + def _get_maya_payload(self, data): + + job_info = copy.deepcopy(self.job_info) + + if self.asset_dependencies: + # Asset dependency to wait for at least the scene file to sync. + job_info.AssetDependency += self.scene_path + + # Get layer prefix + renderlayer = self._instance.data["setMembers"] + renderer = self._instance.data["renderer"] + layer_prefix_attr = RenderSettings.get_image_prefix_attr(renderer) + layer_prefix = get_attr_in_layer(layer_prefix_attr, layer=renderlayer) + + plugin_info = copy.deepcopy(self.plugin_info) + plugin_info.update({ + # Output directory and filename + "OutputFilePath": data["dirname"].replace("\\", "/"), + "OutputFilePrefix": layer_prefix, + }) + + # This hack is here because of how Deadline handles Renderman version. + # it considers everything with `renderman` set as version older than + # Renderman 22, and so if we are using renderman > 21 we need to set + # renderer string on the job to `renderman22`. We will have to change + # this when Deadline releases new version handling this. 
+ renderer = self._instance.data["renderer"]
+ if renderer == "renderman":
+ try:
+ from rfm2.config import cfg # noqa
+ except ImportError:
+ raise Exception("Cannot determine renderman version")
+
+ rman_version = cfg().build_info.version() # type: str
+ if int(rman_version.split(".")[0]) > 22:
+ renderer = "renderman22"
+
+ plugin_info["Renderer"] = renderer
+
+ # this is needed because renderman plugin in Deadline
+ # handles directory and file prefixes separately
+ plugin_info["OutputFilePath"] = job_info.OutputDirectory[0]
+
+ return job_info, plugin_info
+
+ def _get_vray_export_payload(self, data):
+
+ job_info = copy.deepcopy(self.job_info)
+ job_info.Name = self._job_info_label("Export")
+
+ # Get V-Ray settings info to compute output path
+ vray_scene = self.format_vray_output_filename()
+
+ plugin_info = {
+ "Renderer": "vray",
+ "SkipExistingFrames": True,
+ "UseLegacyRenderLayers": True,
+ "OutputFilePath": os.path.dirname(vray_scene)
+ }
+
+ return job_info, plugin_info
+
+ def _get_arnold_export_payload(self, data):
+
+ try:
+ from openpype.scripts import export_maya_ass_job
+ except Exception:
+ raise AssertionError(
+ "Expected module 'export_maya_ass_job' to be available")
+
+ module_path = export_maya_ass_job.__file__
+ if module_path.endswith(".pyc"):
+ module_path = module_path[: -len(".pyc")] + ".py"
+
+ script = os.path.normpath(module_path)
+
+ job_info = copy.deepcopy(self.job_info)
+ job_info.Name = self._job_info_label("Export")
+
+ # Force a single frame Python job
+ job_info.Plugin = "Python"
+ job_info.Frames = 1
+
+ renderlayer = self._instance.data["setMembers"]
+
+ # add required env vars for the export script
+ envs = {
+ "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"),
+ "OPENPYPE_ASS_EXPORT_RENDER_LAYER": renderlayer,
+ "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path,
+ "OPENPYPE_ASS_EXPORT_OUTPUT": job_info.OutputFilename[0],
+ "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa
+ "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa
+ "OPENPYPE_ASS_EXPORT_STEP": 1
+ }
+ for key, value in envs.items():
+ if not value:
+ continue
+ job_info.EnvironmentKeyValue[key] = value
+
+ plugin_info = PythonPluginInfo(
+ ScriptFile=script,
+ Version="3.6",
+ Arguments="",
+ SingleFrameOnly="True"
+ )
+
+ return job_info, attr.asdict(plugin_info)
+
+ def _get_vray_render_payload(self, data):
+
+ # Job Info
+ job_info = copy.deepcopy(self.job_info)
+ job_info.Name = self._job_info_label("Render")
+ job_info.Plugin = "Vray"
+ job_info.OverrideTaskExtraInfoNames = False
+
+ # Plugin Info
+ plugin_info = VRayPluginInfo(
+ InputFilename=self.format_vray_output_filename(),
+ SeparateFilesPerFrame=False,
+ VRayEngine="V-Ray",
+ Width=self._instance.data["resolutionWidth"],
+ Height=self._instance.data["resolutionHeight"],
+ OutputFilePath=job_info.OutputDirectory[0],
+ OutputFileName=job_info.OutputFilename[0]
+ )
+
+ return job_info, attr.asdict(plugin_info)
+
+ def _get_arnold_render_payload(self, data):
+
+ # Job Info
+ job_info = copy.deepcopy(self.job_info)
+ job_info.Name = self._job_info_label("Render")
+ job_info.Plugin = "Arnold"
+ job_info.OverrideTaskExtraInfoNames = False
+
+ # Plugin Info
+ ass_file, _ = os.path.splitext(data["output_filename_0"])
+ ass_filepath = ass_file + ".ass"
+
+ plugin_info = ArnoldPluginInfo(
+ ArnoldFile=ass_filepath
+ )
+
+ return job_info, attr.asdict(plugin_info)
+
+ def format_vray_output_filename(self):
+ """Format the expected output file of the Export
+        job.
+
+        Example:
+            <Scene>/<Scene>_<Layer>/<Layer>
+            "shot010_v006/shot010_v006_CHARS/CHARS_0001.vrscene"
+        Returns:
+            str
+
+        """
+
+        # "vrayscene/<Scene>/<Scene>_<Layer>/<Layer>"
+        vray_settings = cmds.ls(type="VRaySettingsNode")
+        node = vray_settings[0]
+        template = cmds.getAttr("{}.vrscene_filename".format(node))
+        scene, _ = os.path.splitext(self.scene_path)
+
+        def smart_replace(string, key_values):
+            new_string = string
+            for key, value in key_values.items():
+                new_string = new_string.replace(key, value)
+            return new_string
+
+        # Get workfile scene path without extension to format vrscene_filename
+        scene_filename = os.path.basename(self.scene_path)
+        scene_filename_no_ext, _ = os.path.splitext(scene_filename)
+
+        layer = self._instance.data['setMembers']
+
+        # Reformat without tokens
+        output_path = smart_replace(
+            template,
+            {"<Scene>": scene_filename_no_ext,
+             "<Layer>": layer})
+
+        start_frame = int(self._instance.data["frameStartHandle"])
+        workspace = self._instance.context.data["workspace"]
+        filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame)
+        filepath_zero = os.path.join(workspace, filename_zero)
+
+        return filepath_zero.replace("\\", "/")
+
+    def _patch_workfile(self):
+        """Patch Maya scene.
+
+        This will take a list of patches (lines to add) and apply them to
+        the *published* Maya scene file (the one used later for rendering).
+
+        Patches are dicts with the following structure::
+            {
+                "name": "Name of patch",
+                "regex": "regex of line before patch",
+                "line": "line to insert"
+            }
+
+        """
+        project_settings = self._instance.context.data["project_settings"]
+        patches = (
+            project_settings.get(
+                "deadline", {}).get(
+                "publish", {}).get(
+                "MayaSubmitDeadline", {}).get(
+                "scene_patches", {})
+        )
+        if not patches:
+            return
+
+        if os.path.splitext(self.scene_path)[1].lower() != ".ma":
+            self.log.debug("Skipping workfile patch since workfile is not "
+                           "a .ma file")
+            return
+
+        compiled_regex = [re.compile(p["regex"]) for p in patches]
+        with open(self.scene_path, "r+") as pf:
+            scene_data = pf.readlines()
+            for ln, line in enumerate(scene_data):
+                for i, r in enumerate(compiled_regex):
+                    if re.match(r, line):
+                        scene_data.insert(ln + 1, patches[i]["line"])
+                        pf.seek(0)
+                        pf.writelines(scene_data)
+                        pf.truncate()
+                        self.log.info("Applied {} patch to scene.".format(
+                            patches[i]["name"]
+                        ))
+
+    def _job_info_label(self, label):
+        return "{label} {job.Name} [{start}-{end}]".format(
+            label=label,
+            job=self.job_info,
+            start=int(self._instance.data["frameStartHandle"]),
+            end=int(self._instance.data["frameEndHandle"]),
+        )
+
+    @staticmethod
+    def _iter_expected_files(exp):
+        if isinstance(exp[0], dict):
+            for _aov, files in exp[0].items():
+                for file in files:
+                    yield file
+        else:
+            for file in exp:
+                yield file

 def _format_tiles(
@@ -89,12 +766,12 @@ def _format_tiles(
     Example::
         Image prefix is:
-        `maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>`
+        `<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>`

         Result for tile 0 for 4x4 will be:
-        `maya/<Scene>/<RenderLayer>/_tile_1x1_4x4_<RenderLayer>_<RenderPass>`
+        `<Scene>/<RenderLayer>/_tile_1x1_4x4_<RenderLayer>_<RenderPass>`

-    Calculating coordinates is tricky as in Job they are defined as top,
+    Calculating coordinates is tricky as in Job they are defined as top,
     left, bottom, right with zero being in top-left corner. But Assembler
     configuration file takes tile coordinates as X, Y, Width and Height and
     zero is bottom left corner.

@@ -103,25 +780,32 @@
        filename (str): Filename to process as tiles.
        index (int): Index of that file if it is sequence.
        tiles_x (int): Number of tiles in X.
-       tiles_y (int): Number if tikes in Y.
+       tiles_y (int): Number of tiles in Y.
        width (int): Width resolution of final image.
height (int): Height resolution of final image. prefix (str): Image prefix. Returns: - (dict, dict): Tuple of two dictionaires - first can be used to + (dict, dict): Tuple of two dictionaries - first can be used to extend JobInfo, second has tiles x, y, width and height used for assembler configuration. """ - tile = 0 + # Math used requires integers for correct output - as such + # we ensure our inputs are correct. + assert type(tiles_x) is int, "tiles_x must be an integer" + assert type(tiles_y) is int, "tiles_y must be an integer" + assert type(width) is int, "width must be an integer" + assert type(height) is int, "height must be an integer" + out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y + w_space = width // tiles_x + h_space = height // tiles_y cfg["TilesCropped"] = "False" + tile = 0 for tile_x in range(1, tiles_x + 1): for tile_y in reversed(range(1, tiles_y + 1)): tile_prefix = "_tile_{}x{}_{}x{}_".format( @@ -129,1034 +813,38 @@ def _format_tiles( tiles_x, tiles_y ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) + new_filename = "{}/{}{}".format( os.path.dirname(filename), tile_prefix, os.path.basename(filename) ) - out["JobInfo"][out_tile_index] = new_filename + + top = height - (tile_y * h_space) + bottom = height - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 + + # Job info + out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename # noqa: E501 + + # Plugin Info out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + out["PluginInfo"]["RegionTop{}".format(tile)] = top + out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom + out["PluginInfo"]["RegionLeft{}".format(tile)] = left + out["PluginInfo"]["RegionRight{}".format(tile)] = right - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 - + # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - + cfg["Tile{}X".format(tile)] = left + cfg["Tile{}Y".format(tile)] = top cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space tile += 1 + return out, cfg - - -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. 
- root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. - # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. - display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. 
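For readability, a standalone sketch of the V-Ray filename resolution in the branch above: the prefix from `vraySettings.fileNamePrefix` carries scene/layer tokens (token spellings assumed here; the `re.sub` calls above match them case-insensitively), then frame padding and the queried extension are appended. All values below are made up.

```python
# Standalone sketch of the V-Ray filename_0 assembly (assumed values).
import os
import re

root = "/work/renders"                       # base path passed to the helper
filename_prefix = "<Scene>/<Layer>/<Layer>"  # vraySettings.fileNamePrefix
scene = "shot010_v006"                       # scene name without extension
renderlayer = "CHARS"                        # render layer suffix
padding = 4                                  # frame padding from render attrs
extension = "exr"                            # vraySettings.imageFormatStr

filename_0 = re.sub("<scene>", scene, filename_prefix, flags=re.IGNORECASE)
filename_0 = re.sub("<layer>", renderlayer, filename_0, flags=re.IGNORECASE)
filename_0 = "{}.{}.{}".format(filename_0, "#" * padding, extension)
print(os.path.normpath(os.path.join(root, filename_0)))
# POSIX result: /work/renders/shot010_v006/CHARS/CHARS.####.exr
```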
- filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - -class MayaSubmitDeadline(pyblish.api.InstancePlugin): - """Submit available render layers to Deadline. - - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". - - Attributes: - use_published (bool): Use published scene to render instead of the - one in work area. - - """ - - label = "Submit to Deadline" - order = pyblish.api.IntegratorOrder + 0.1 - hosts = ["maya"] - families = ["renderlayer"] - targets = ["local"] - - use_published = True - tile_assembler_plugin = "OpenPypeTileAssembler" - asset_dependencies = False - priority = 50 - tile_priority = 50 - limit_groups = [] - jobInfo = {} - pluginInfo = {} - group = "none" - - def process(self, instance): - """Plugin entry point.""" - instance.data["toBeRenderedOn"] = "deadline" - context = instance.context - - self._instance = instance - self.payload_skeleton = copy.deepcopy(payload_skeleton_template) - - # get default deadline webservice url from deadline module - self.deadline_url = instance.context.data.get("defaultDeadline") - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - self.deadline_url = instance.data.get("deadlineUrl") - assert self.deadline_url, "Requires Deadline Webservice URL" - - # just using existing names from Setting - self._job_info = self.jobInfo - - self._plugin_info = self.pluginInfo - - self.limit_groups = self.limit - - context = instance.context - workspace = context.data["workspaceDir"] - anatomy = context.data['anatomy'] - instance.data["toBeRenderedOn"] = "deadline" - - filepath = None - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) - - # Handle render/export from published scene or not ------------------ - if self.use_published: - patched_files = [] - for i in context: - if "workfile" not in i.data["families"]: - continue - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - filepath = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - filepath)) - - if not os.path.exists(filepath): - self.log.error("published scene does not exist!") - raise - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(filepath))[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - exp = instance.data.get("expectedFiles") - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - f.replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( - f.replace(orig_scene, 
new_scene) - ) - instance.data["expectedFiles"] = [new_exp] - - if instance.data.get("publishRenderMetadataFolder"): - instance.data["publishRenderMetadataFolder"] = \ - instance.data["publishRenderMetadataFolder"].replace( - orig_scene, new_scene) - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) - # patch workfile is needed - if filepath not in patched_files: - patched_file = self._patch_workfile(filepath, patches) - patched_files.append(patched_file) - - all_instances = [] - for result in context.data["results"]: - if (result["instance"] is not None and - result["instance"] not in all_instances): # noqa: E128 - all_instances.append(result["instance"]) - - # fallback if nothing was set - if not filepath: - self.log.warning("Falling back to workfile") - filepath = context.data["currentFile"] - - self.log.debug(filepath) - - # Gather needed data ------------------------------------------------ - default_render_file = instance.context.data.get('project_settings')\ - .get('maya')\ - .get('RenderSettings')\ - .get('default_render_image_folder') - filename = os.path.basename(filepath) - comment = context.data.get("comment", "") - dirname = os.path.join(workspace, default_render_file) - renderlayer = instance.data['setMembers'] # rs_beauty - deadline_user = context.data.get("user", getpass.getuser()) - - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. - src_filename = os.path.basename(context.data["currentFile"]) - jobname = "%s - %s" % (src_filename, instance.name) - - # Get the variables depending on the renderer - render_variables = get_renderer_variables(renderlayer, dirname) - filename_0 = render_variables["filename_0"] - if self.use_published: - new_scene = os.path.splitext(filename)[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - filename_0 = render_variables["filename_0"].replace( - orig_scene, new_scene) - - output_filename_0 = filename_0 - - # this is needed because renderman handles directory and file - # prefixes separately - if self._instance.data["renderer"] == "renderman": - dirname = os.path.dirname(output_filename_0) - - # Create render folder ---------------------------------------------- - try: - # Ensure render folder exists - os.makedirs(dirname) - except OSError: - pass - - # Fill in common data to payload ------------------------------------ - payload_data = {} - payload_data["filename"] = filename - payload_data["filepath"] = filepath - payload_data["jobname"] = jobname - payload_data["deadline_user"] = deadline_user - payload_data["comment"] = comment - payload_data["output_filename_0"] = output_filename_0 - payload_data["render_variables"] = render_variables - payload_data["renderlayer"] = renderlayer - payload_data["workspace"] = workspace - payload_data["dirname"] = dirname - - self.log.info("--- Submission data:") - for k, v in payload_data.items(): - self.log.info("- {}: {}".format(k, v)) - self.log.info("-" * 20) - - frame_pattern = self.payload_skeleton["JobInfo"]["Frames"] - self.payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format( - start=int(self._instance.data["frameStartHandle"]), - end=int(self._instance.data["frameEndHandle"]), - step=int(self._instance.data["byFrameStep"])) - - self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( - 
"mayaRenderPlugin", "MayaBatch") - - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Job name, as seen in Monitor - self.payload_skeleton["JobInfo"]["Name"] = jobname - # Arbitrary username, for visualisation in Monitor - self.payload_skeleton["JobInfo"]["UserName"] = deadline_user - # Set job priority - self.payload_skeleton["JobInfo"]["Priority"] = \ - self._instance.data.get("priority", self.priority) - - if self.group != "none" and self.group: - self.payload_skeleton["JobInfo"]["Group"] = self.group - - if self.limit_groups: - self.payload_skeleton["JobInfo"]["LimitGroups"] = \ - ",".join(self.limit_groups) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - self.payload_skeleton["JobInfo"]["Comment"] = comment - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Adding file dependencies. - dependencies = instance.context.data["fileDependencies"] - dependencies.append(filepath) - if self.asset_dependencies: - for dependency in dependencies: - key = "AssetDependency" + str(dependencies.index(dependency)) - self.payload_skeleton["JobInfo"][key] = dependency - - # Handle environments ----------------------------------------------- - # We need those to pass them to pype for it to set correct context - keys = [ - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - self.payload_skeleton["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - # Add options from RenderGlobals------------------------------------- - render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) - - # Submit preceding export jobs ------------------------------------- - export_job = None - assert not all(x in instance.data["families"] - for x in ['vrayscene', 'assscene']), ( - "Vray Scene and Ass Scene options are mutually exclusive") - if "vrayscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "vray") - - if "assscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "arnold") - - # Prepare main render job ------------------------------------------- - if "vrayscene" in instance.data["families"]: - payload = self._get_vray_render_payload(payload_data) - elif "assscene" in instance.data["families"]: - payload = self._get_arnold_render_payload(payload_data) - else: - payload = self._get_maya_payload(payload_data) - - # Add export job as dependency -------------------------------------- - if 
export_job: - payload["JobInfo"]["JobDependency0"] = export_job - - # Add list of expected files to job --------------------------------- - exp = instance.data.get("expectedFiles") - exp_index = 0 - output_filenames = {} - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - exp_index += 1 - else: - col, rem = clique.assemble(exp) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file - - plugin = payload["JobInfo"]["Plugin"] - self.log.info("using render plugin : {}".format(plugin)) - - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) - - self.preflight_check(instance) - - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self._job_info) - payload["PluginInfo"].update(self._plugin_info) - - # Prepare tiles data ------------------------------------------------ - if instance.data.get("tileRendering"): - # if we have sequence of files, we need to create tile job for - # every frame - - payload["JobInfo"]["TileJob"] = True - payload["JobInfo"]["TileJobTilesInX"] = instance.data.get("tilesX") - payload["JobInfo"]["TileJobTilesInY"] = instance.data.get("tilesY") - payload["PluginInfo"]["ImageHeight"] = instance.data.get("resolutionHeight") # noqa: E501 - payload["PluginInfo"]["ImageWidth"] = instance.data.get("resolutionWidth") # noqa: E501 - payload["PluginInfo"]["RegionRendering"] = True - - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { - "CleanupTiles": 1, - "ErrorOnMissing": True - } - } - assembly_payload["JobInfo"].update(output_filenames) - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) - assembly_payload["JobInfo"]["UserName"] = deadline_user - - frame_payloads = [] - assembly_payloads = [] - - R_FRAME_NUMBER = re.compile(r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 - REPL_FRAME_NUMBER = re.compile(r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 - - if 
isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - # get files from `beauty` - files = exp[0].get("beauty") - # assembly files are used for assembly jobs as we need to put - # together all AOVs - assembly_files = list( - itertools.chain.from_iterable( - [f for _, f in exp[0].items()])) - if not files: - # if beauty doesn't exists, use first aov we found - files = exp[0].get(list(exp[0].keys())[0]) - else: - files = exp - assembly_files = files - - frame_jobs = {} - - file_index = 1 - for file in files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 - ) - self.log.info( - "... preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame - - tiles_data = _format_tiles( - file, 0, - instance.data.get("tilesX"), - instance.data.get("tilesY"), - instance.data.get("resolutionWidth"), - instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) - - self.log.info("hashing {} - {}".format(file_index, file)) - job_hash = hashlib.sha256( - ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file - - frame_payloads.append(new_payload) - file_index += 1 - - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - assembly_payloads.append(new_assembly_payload) - file_index += 1 - - self.log.info( - "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - - url = "{}/api/jobs".format(self.deadline_url) - tiles_count = instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = requests_post(url, json=tile_job) - if not response.ok: - raise Exception(response.text) - - job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] - - for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id - - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] - # write assembly job config files - now = datetime.now() - - config_file = os.path.join( - os.path.dirname(output_filename_0), - "{}_config_{}.txt".format( - os.path.splitext(file)[0], - now.strftime("%Y_%m_%d_%H_%M_%S") - ) - ) - - try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) - except OSError: - # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) - - # add config file as job 
auxFile - assembly_job["AuxFiles"] = [config_file] - - with open(config_file, "w") as cf: - print("TileCount={}".format(tiles_count), file=cf) - print("ImageFileName={}".format(file), file=cf) - print("ImageWidth={}".format( - instance.data.get("resolutionWidth")), file=cf) - print("ImageHeight={}".format( - instance.data.get("resolutionHeight")), file=cf) - - tiles = _format_tiles( - file, 0, - instance.data.get("tilesX"), - instance.data.get("tilesY"), - instance.data.get("resolutionWidth"), - instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - )[1] - sorted(tiles) - for k, v in tiles.items(): - print("{}={}".format(k, v), file=cf) - - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - self.log.debug(json.dumps(ass_job, indent=4, sort_keys=True)) - response = requests_post(url, json=ass_job) - if not response.ok: - raise Exception(response.text) - - instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 - - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - else: - # Submit job to farm -------------------------------------------- - self.log.info("Submitting ...") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) - - # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - raise Exception(response.text) - instance.data["deadlineSubmissionJob"] = response.json() - - def _get_maya_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - - if not self.asset_dependencies: - job_info_ext = {} - - else: - job_info_ext = { - # Asset dependency to wait for at least the scene file to sync. - "AssetDependency0": data["filepath"], - } - - renderer = self._instance.data["renderer"] - - # This hack is here because of how Deadline handles Renderman version. - # it considers everything with `renderman` set as version older than - # Renderman 22, and so if we are using renderman > 21 we need to set - # renderer string on the job to `renderman22`. We will have to change - # this when Deadline releases new version handling this. 
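Stepping back from the Renderman note above: the pairing between each tile render job and its assembly job in this flow hinges on the `ExtraInfo0` hash. A reduced sketch of that mechanism, with the payloads trimmed to the relevant keys and the webservice response stubbed out (names and ids are stand-ins):

```python
# Reduced sketch of the ExtraInfo0 pairing used by the submission above.
import hashlib

file_index = 1
file = "/renders/beauty.0001.exr"
job_hash = hashlib.sha256(
    "{}_{}".format(file_index, file).encode("utf-8")).hexdigest()

tile_job = {"JobInfo": {"ExtraInfo0": job_hash, "ExtraInfo1": file}}
assembly_job = {"JobInfo": {"ExtraInfo0": job_hash, "ExtraInfo1": file}}

# After POSTing tile_job to "{deadline_url}/api/jobs", the response echoes
# the hash back as Props.Ex0 together with the new job id:
job_id, returned_hash = "b75d2f52", job_hash  # stand-ins for response.json()

# Any assembly payload carrying the same hash depends on that tile job.
if assembly_job["JobInfo"]["ExtraInfo0"] == returned_hash:
    assembly_job["JobInfo"]["JobDependency0"] = job_id
```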
- if self._instance.data["renderer"] == "renderman": - try: - from rfm2.config import cfg # noqa - except ImportError: - raise Exception("Cannot determine renderman version") - - rman_version = cfg().build_info.version() # type: str - if int(rman_version.split(".")[0]) > 22: - renderer = "renderman22" - - plugin_info = { - "SceneFile": data["filepath"], - # Output directory and filename - "OutputFilePath": data["dirname"].replace("\\", "/"), - "OutputFilePrefix": data["render_variables"]["filename_prefix"], # noqa: E501 - - # Only render layers are considered renderable in this pipeline - "UsingRenderLayers": True, - - # Render only this layer - "RenderLayer": data["renderlayer"], - - # Determine which renderer to use from the file itself - "Renderer": renderer, - - # Resolve relative references - "ProjectPath": data["workspace"], - } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload - - def _get_vray_export_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - scene, _ = os.path.splitext(data["filename"]) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - output = os.path.dirname(first_file) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": self._instance.data.get( - "mayaRenderPlugin", "MayaPype"), - "FramesPerTask": self._instance.data.get("framesPerTask", 1) - } - - plugin_info_ext = { - # Renderer - "Renderer": "vray", - # Input - "SceneFile": data["filepath"], - "SkipExistingFrames": True, - "UsingRenderLayers": True, - "UseLegacyRenderLayers": True, - "RenderLayer": data["renderlayer"], - "ProjectPath": data["workspace"], - "OutputFilePath": output - } - - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) - return payload - - def _get_arnold_export_payload(self, data): - - try: - from openpype.scripts import export_maya_ass_job - except Exception: - raise AssertionError( - "Expected module 'export_maya_ass_job' to be available") - - module_path = export_maya_ass_job.__file__ - if module_path.endswith(".pyc"): - module_path = module_path[: -len(".pyc")] + ".py" - - script = os.path.normpath(module_path) - - payload = copy.deepcopy(self.payload_skeleton) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Python", - "FramesPerTask": self._instance.data.get("framesPerTask", 1), - "Frames": 1 - } - - plugin_info_ext = { - "Version": "3.6", - "ScriptFile": script, - "Arguments": "", - "SingleFrameOnly": "True", - } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) - - envs = [ - v - for k, v in payload["JobInfo"].items() - if k.startswith("EnvironmentKeyValue") - ] - - # add app name to environment - envs.append( - "AVALON_APP_NAME={}".format(os.environ.get("AVALON_APP_NAME"))) - envs.append( - "OPENPYPE_ASS_EXPORT_RENDER_LAYER={}".format(data["renderlayer"])) - envs.append( - "OPENPYPE_ASS_EXPORT_SCENE_FILE={}".format(data["filepath"])) - envs.append( - "OPENPYPE_ASS_EXPORT_OUTPUT={}".format( - 
payload['JobInfo']['OutputFilename0'])) - envs.append( - "OPENPYPE_ASS_EXPORT_START={}".format( - int(self._instance.data["frameStartHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_END={}".format( - int(self._instance.data["frameEndHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_STEP={}".format(1)) - - for i, e in enumerate(envs): - payload["JobInfo"]["EnvironmentKeyValue{}".format(i)] = e - return payload - - def _get_vray_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - # "vrayscene//_/" - - scene, _ = os.path.splitext(data["filename"]) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Vray", - "OverrideTaskExtraInfoNames": False, - } - - plugin_info = { - "InputFilename": first_file, - "SeparateFilesPerFrame": True, - "VRayEngine": "V-Ray", - - "Width": self._instance.data["resolutionWidth"], - "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": payload["JobInfo"]["OutputDirectory0"], - "OutputFileName": payload["JobInfo"]["OutputFilename0"] - } - - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload - - def _get_arnold_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - ass_file, _ = os.path.splitext(data["output_filename_0"]) - first_file = ass_file + ".ass" - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Arnold", - "OverrideTaskExtraInfoNames": False, - } - - plugin_info = { - "ArnoldFile": first_file, - } - - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload - - def _submit_export(self, data, format): - if format == "vray": - payload = self._get_vray_export_payload(data) - self.log.info("Submitting vrscene export job.") - elif format == "arnold": - payload = self._get_arnold_export_payload(data) - self.log.info("Submitting ass export job.") - - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - self.log.error("Submition failed!") - self.log.error(response.status_code) - self.log.error(response.content) - self.log.debug(payload) - raise RuntimeError(response.text) - - dependency = response.json() - return dependency["_id"] - - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers.""" - for key in ("frameStartHandle", "frameEndHandle", "byFrameStep"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) - - def format_vray_output_filename(self, filename, template, dir=False): - """Format the expected output file of the Export job. 
- - Example: - /_/ - "shot010_v006/shot010_v006_CHARS/CHARS" - - Args: - instance: - filename(str): - dir(bool): - - Returns: - str - - """ - def smart_replace(string, key_values): - new_string = string - for key, value in key_values.items(): - new_string = new_string.replace(key, value) - return new_string - - # Ensure filename has no extension - file_name, _ = os.path.splitext(filename) - - layer = self._instance.data['setMembers'] - - # Reformat without tokens - output_path = smart_replace( - template, - {"": file_name, - "": layer}) - - if dir: - return output_path.replace("\\", "/") - - start_frame = int(self._instance.data["frameStartHandle"]) - filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame) - - result = filename_zero.replace("\\", "/") - - return result - - def _patch_workfile(self, file, patches): - # type: (str, dict) -> [str, None] - """Patch Maya scene. - - This will take list of patches (lines to add) and apply them to - *published* Maya scene file (that is used later for rendering). - - Patches are dict with following structure:: - { - "name": "Name of patch", - "regex": "regex of line before patch", - "line": "line to insert" - } - - Args: - file (str): File to patch. - patches (dict): Dictionary defining patches. - - Returns: - str: Patched file path or None - - """ - if os.path.splitext(file)[1].lower() != ".ma" or not patches: - return None - - compiled_regex = [re.compile(p["regex"]) for p in patches] - with open(file, "r+") as pf: - scene_data = pf.readlines() - for ln, line in enumerate(scene_data): - for i, r in enumerate(compiled_regex): - if re.match(r, line): - scene_data.insert(ln + 1, patches[i]["line"]) - pf.seek(0) - pf.writelines(scene_data) - pf.truncate() - self.log.info( - "Applied {} patch to scene.".format( - patches[i]["name"])) - return file diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 336a56ec45..b09d2935ab 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -114,6 +114,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["deadlineSubmissionJob"] = resp.json() instance.data["publishJobState"] = "Suspended" + # add to list of job Id + if not instance.data.get("bakingSubmissionJobs"): + instance.data["bakingSubmissionJobs"] = [] + + instance.data["bakingSubmissionJobs"].append( + resp.json()["_id"]) + # redefinition of families if "render.farm" in families: instance.data['family'] = 'write' diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 379953c9e4..aba505b3c6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -296,6 +296,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): for assembly_id in instance.data.get("assemblySubmissionJobs"): payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id # noqa: E501 job_index += 1 + elif instance.data.get("bakingSubmissionJobs"): + self.log.info("Adding baking submission jobs as dependencies...") + job_index = 0 + for assembly_id in instance.data["bakingSubmissionJobs"]: + payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id # noqa: E501 + job_index += 1 else: payload["JobInfo"]["JobDependency0"] = job["_id"] @@ -694,9 +700,6 @@ 
class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.context = context self.anatomy = instance.context.data["anatomy"] - if hasattr(instance, "_log"): - data['_log'] = instance._log - asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"] subset = data.get("subset") @@ -775,7 +778,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "resolutionHeight": data.get("resolutionHeight", 1080), "multipartExr": data.get("multipartExr", False), "jobBatchName": data.get("jobBatchName", ""), - "useSequenceForReview": data.get("useSequenceForReview", True) + "useSequenceForReview": data.get("useSequenceForReview", True), + # map inputVersions `ObjectId` -> `str` so json supports it + "inputVersions": list(map(str, data.get("inputVersions", []))) } # skip locking version if we are creating v01 diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py index c2426e0d78..f0a3ddd246 100644 --- a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py +++ b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py @@ -3,7 +3,7 @@ import requests import pyblish.api -from openpype.lib.delivery import collect_frames +from openpype.lib import collect_frames from openpype_modules.deadline.abstract_submit_deadline import requests_get diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 9fca1b5391..625a3f1a28 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -56,7 +56,7 @@ def convert_value_by_type_name(value_type, value): return float(value) # Vectors will probably have more types - if value_type == "vec2f": + if value_type in ("vec2f", "float2"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 @@ -71,7 +71,7 @@ def convert_value_by_type_name(value_type, value): elif parts_len == 4: divisor = 2 elif parts_len == 9: - divisor == 3 + divisor = 3 elif parts_len == 16: divisor = 4 else: @@ -127,7 +127,7 @@ def convert_value_by_type_name(value_type, value): return output print(( - "MISSING IMPLEMENTATION:" + "Dev note (missing implementation):" " Unknown attrib type \"{}\". Value: {}" ).format(value_type, value)) return value @@ -183,7 +183,7 @@ def parse_oiio_xml_output(xml_string): else: value = child.text print(( - "MISSING IMPLEMENTATION:" + "Dev note (missing implementation):" " Unknown tag \"{}\". 
Value \"{}\"" ).format(tag_name, value)) @@ -453,7 +453,7 @@ class OpenPypeTileAssembler(DeadlinePlugin): # Swap to have input as foreground args.append("--swap") # Paste foreground to background - args.append("--paste +{}+{}".format(pos_x, pos_y)) + args.append("--paste {x:+d}{y:+d}".format(x=pos_x, y=pos_y)) args.append("-o") args.append(output_path) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 7261254c6f..e520f08337 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -1,9 +1,13 @@ from .ftrack_module import ( FtrackModule, - FTRACK_MODULE_DIR + FTRACK_MODULE_DIR, + + resolve_ftrack_url, ) __all__ = ( "FtrackModule", - "FTRACK_MODULE_DIR" + "FTRACK_MODULE_DIR", + + "resolve_ftrack_url", ) diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py index 713a4d9aba..332648cd02 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py @@ -1,10 +1,8 @@ import json import copy -from openpype.client import get_project -from openpype.api import ProjectSettings -from openpype.lib import create_project -from openpype.settings import SaveWarningExc +from openpype.client import get_project, create_project +from openpype.settings import ProjectSettings, SaveWarningExc from openpype_modules.ftrack.lib import ( ServerAction, diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 738181dc9a..e549de7ed0 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -18,6 +18,7 @@ from openpype.client import ( get_archived_assets, get_asset_ids_with_subsets ) +from openpype.client.operations import CURRENT_ASSET_DOC_SCHEMA from openpype.pipeline import AvalonMongoDB, schema from openpype_modules.ftrack.lib import ( @@ -35,7 +36,6 @@ from openpype_modules.ftrack.lib.avalon_sync import ( convert_to_fps, InvalidFpsValue ) -from openpype.lib import CURRENT_DOC_SCHEMAS class SyncToAvalonEvent(BaseEvent): @@ -1236,7 +1236,7 @@ class SyncToAvalonEvent(BaseEvent): "_id": mongo_id, "name": name, "type": "asset", - "schema": CURRENT_DOC_SCHEMAS["asset"], + "schema": CURRENT_ASSET_DOC_SCHEMA, "parent": proj["_id"], "data": { "ftrackId": ftrack_ent["id"], diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py b/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py index d04440a564..c19cfd1502 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -18,7 +18,7 @@ from openpype_modules.ftrack.lib import ( tool_definitions_from_app_manager ) -from openpype.api import get_system_settings +from openpype.settings import get_system_settings from openpype.lib import ApplicationManager """ diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py index df914de854..7c896570b1 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -1,7 +1,10 @@ 
import re +from openpype.pipeline.project_folders import ( + get_project_basic_paths, + create_project_folders, +) from openpype_modules.ftrack.lib import BaseAction, statics_icon -from openpype.api import get_project_basic_paths, create_project_folders class CreateProjectFolders(BaseAction): @@ -81,7 +84,7 @@ class CreateProjectFolders(BaseAction): } # Invoking OpenPype API to create the project folders - create_project_folders(basic_paths, project_name) + create_project_folders(project_name, basic_paths) self.create_ftrack_entities(basic_paths, project_entity) self.trigger_event( diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index 79d04a7854..c543dc8834 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -11,7 +11,11 @@ from openpype.client import ( get_versions, get_representations ) -from openpype.lib import StringTemplate, TemplateUnsolved +from openpype.lib import ( + StringTemplate, + TemplateUnsolved, + format_file_size, +) from openpype.pipeline import AvalonMongoDB, Anatomy from openpype_modules.ftrack.lib import BaseAction, statics_icon @@ -134,13 +138,6 @@ class DeleteOldVersions(BaseAction): "title": self.inteface_title } - def sizeof_fmt(self, num, suffix='B'): - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if abs(num) < 1024.0: - return "%3.1f%s%s" % (num, unit, suffix) - num /= 1024.0 - return "%.1f%s%s" % (num, 'Yi', suffix) - def launch(self, session, entities, event): values = event["data"].get("values") if not values: @@ -359,7 +356,7 @@ class DeleteOldVersions(BaseAction): dir_paths, file_paths_by_dir, delete=False ) - msg = "Total size of files: " + self.sizeof_fmt(size) + msg = "Total size of files: {}".format(format_file_size(size)) self.log.warning(msg) @@ -430,7 +427,7 @@ class DeleteOldVersions(BaseAction): "message": msg } - msg = "Total size of files deleted: " + self.sizeof_fmt(size) + msg = "Total size of files deleted: {}".format(format_file_size(size)) self.log.warning(msg) diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index eec245070c..a400c8f5f0 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -10,19 +10,19 @@ from openpype.client import ( get_versions, get_representations ) -from openpype.pipeline import Anatomy from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype_modules.ftrack.lib.custom_attributes import ( query_custom_attributes ) from openpype.lib.dateutils import get_datetime_data -from openpype.lib.delivery import ( - path_from_representation, +from openpype.pipeline import Anatomy +from openpype.pipeline.load import get_representation_path_with_anatomy +from openpype.pipeline.delivery import ( get_format_dict, check_destination_path, - process_single_file, - process_sequence + deliver_single_file, + deliver_sequence, ) @@ -580,7 +580,7 @@ class Delivery(BaseAction): if frame: repre["context"]["frame"] = len(str(frame)) * "#" - repre_path = path_from_representation(repre, anatomy) + repre_path = get_representation_path_with_anatomy(repre, anatomy) # TODO add backup solution where root of path from component # is 
replaced with root args = ( @@ -594,9 +594,9 @@ class Delivery(BaseAction): self.log ) if not frame: - process_single_file(*args) + deliver_single_file(*args) else: - process_sequence(*args) + deliver_sequence(*args) return self.report(report_items) diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py index e89595109e..e825198180 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py @@ -1,10 +1,8 @@ import json import copy -from openpype.client import get_project -from openpype.api import ProjectSettings -from openpype.lib import create_project -from openpype.settings import SaveWarningExc +from openpype.client import get_project, create_project +from openpype.settings import ProjectSettings, SaveWarningExc from openpype_modules.ftrack.lib import ( BaseAction, diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index f99e189082..75ffd7f864 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -6,22 +6,22 @@ import platform import click from openpype.modules import OpenPypeModule -from openpype_interfaces import ( +from openpype.modules.interfaces import ( ITrayModule, IPluginPaths, - ILaunchHookPaths, ISettingsChangeListener ) from openpype.settings import SaveWarningExc +from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) +_URL_NOT_SET = object() class FtrackModule( OpenPypeModule, ITrayModule, IPluginPaths, - ILaunchHookPaths, ISettingsChangeListener ): name = "ftrack" @@ -30,17 +30,8 @@ class FtrackModule( ftrack_settings = settings[self.name] self.enabled = ftrack_settings["enabled"] - # Add http schema - ftrack_url = ftrack_settings["ftrack_server"].strip("/ ") - if ftrack_url: - if "http" not in ftrack_url: - ftrack_url = "https://" + ftrack_url - - # Check if "ftrack.app" is part os url - if "ftrackapp.com" not in ftrack_url: - ftrack_url = ftrack_url + ".ftrackapp.com" - - self.ftrack_url = ftrack_url + self._settings_ftrack_url = ftrack_settings["ftrack_server"] + self._ftrack_url = _URL_NOT_SET current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -72,6 +63,16 @@ class FtrackModule( self.timers_manager_connector = None self._timers_manager_module = None + def get_ftrack_url(self): + if self._ftrack_url is _URL_NOT_SET: + self._ftrack_url = resolve_ftrack_url( + self._settings_ftrack_url, + logger=self.log + ) + return self._ftrack_url + + ftrack_url = property(get_ftrack_url) + def get_global_environments(self): """Ftrack's global environments.""" return { @@ -85,7 +86,8 @@ class FtrackModule( } def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" + return os.path.join(FTRACK_MODULE_DIR, "launch_hooks") def modify_application_launch_arguments(self, application, env): @@ -480,6 +482,51 @@ class FtrackModule( click_group.add_command(cli_main) +def _check_ftrack_url(url): + import requests + + try: + result = requests.get(url, allow_redirects=False) + except requests.exceptions.RequestException: + return False + + if (result.status_code != 200 or "FTRACK_VERSION" not in result.headers): + return False + return True + + +def resolve_ftrack_url(url, logger=None): + """Checks if Ftrack server is responding.""" + + if 
logger is None: + logger = Logger.get_logger(__name__) + + url = url.strip("/ ") + if not url: + logger.error("Ftrack URL is not set!") + return None + + if not url.startswith("http"): + url = "https://" + url + + ftrack_url = None + if not url.endswith("ftrackapp.com"): + ftrackapp_url = url + ".ftrackapp.com" + if _check_ftrack_url(ftrackapp_url): + ftrack_url = ftrackapp_url + + if not ftrack_url and _check_ftrack_url(url): + ftrack_url = url + + if ftrack_url: + logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + + else: + logger.error("Ftrack server \"{}\" is not accessible!".format(url)) + + return ftrack_url + + @click.group(FtrackModule.name, help="Ftrack module related commands.") def cli_main(): pass diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/ftrack/ftrack_server/__init__.py index 9e3920b500..8e5f7c4c51 100644 --- a/openpype/modules/ftrack/ftrack_server/__init__.py +++ b/openpype/modules/ftrack/ftrack_server/__init__.py @@ -1,8 +1,6 @@ from .ftrack_server import FtrackServer -from .lib import check_ftrack_url __all__ = ( "FtrackServer", - "check_ftrack_url" ) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 3ef7c8270a..20c5ab24a8 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -20,9 +20,11 @@ from openpype.lib import ( get_openpype_version, get_build_version, ) -from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import ( + FTRACK_MODULE_DIR, + resolve_ftrack_url, +) from openpype_modules.ftrack.lib import credentials -from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url from openpype_modules.ftrack.ftrack_server import socket_thread @@ -114,7 +116,7 @@ def legacy_server(ftrack_url): while True: if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible and not printed_ftrack_error: @@ -257,7 +259,7 @@ def main_loop(ftrack_url): while True: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) if not mongo_accessible: mongo_accessible = check_mongo_url(mongo_uri) @@ -441,7 +443,7 @@ def run_event_server( os.environ["CLOCKIFY_API_KEY"] = clockify_api_key # Check url regex and accessibility - ftrack_url = check_ftrack_url(ftrack_url) + ftrack_url = resolve_ftrack_url(ftrack_url) if not ftrack_url: print('Exiting! < Please enter Ftrack server url >') return 1 diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/ftrack/ftrack_server/ftrack_server.py index 8944591b71..c75b8f7172 100644 --- a/openpype/modules/ftrack/ftrack_server/ftrack_server.py +++ b/openpype/modules/ftrack/ftrack_server/ftrack_server.py @@ -7,12 +7,10 @@ import traceback import ftrack_api from openpype.lib import ( - PypeLogger, + Logger, modules_from_path ) -log = PypeLogger.get_logger(__name__) - """ # Required - Needed for connection to Ftrack FTRACK_SERVER # Ftrack server e.g. "https://myFtrack.ftrackapp.com" @@ -43,10 +41,13 @@ class FtrackServer: server.run_server() .. 
""" + # set Ftrack logging to Warning only - OPTIONAL ftrack_log = logging.getLogger("ftrack_api") ftrack_log.setLevel(logging.WARNING) + self.log = Logger.get_logger(__name__) + self.stopped = True self.is_running = False @@ -72,7 +73,7 @@ class FtrackServer: # Get all modules with functions modules, crashed = modules_from_path(path) for filepath, exc_info in crashed: - log.warning("Filepath load crashed {}.\n{}".format( + self.log.warning("Filepath load crashed {}.\n{}".format( filepath, traceback.format_exception(*exc_info) )) @@ -87,7 +88,7 @@ class FtrackServer: break if not register_function: - log.warning( + self.log.warning( "\"{}\" - Missing register method".format(filepath) ) continue @@ -97,7 +98,7 @@ class FtrackServer: ) if not register_functions: - log.warning(( + self.log.warning(( "There are no events with `register` function" " in registered paths: \"{}\"" ).format("| ".join(paths))) @@ -106,7 +107,7 @@ class FtrackServer: try: register_func(self.session) except Exception: - log.warning( + self.log.warning( "\"{}\" - register was not successful".format(filepath), exc_info=True ) @@ -141,7 +142,7 @@ class FtrackServer: self.session = session if load_files: if not self.handler_paths: - log.warning(( + self.log.warning(( "Paths to event handlers are not set." " Ftrack server won't launch." )) @@ -151,8 +152,8 @@ class FtrackServer: self.set_files(self.handler_paths) msg = "Registration of event handlers has finished!" - log.info(len(msg) * "*") - log.info(msg) + self.log.info(len(msg) * "*") + self.log.info(msg) # keep event_hub on session running self.session.event_hub.wait() diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 947dacf917..c8143f739c 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -26,45 +26,12 @@ except ImportError: from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.client import OpenPypeMongoConnection -from openpype.api import Logger +from openpype.lib import Logger TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" - if logger is None: - logger = Logger.get_logger(__name__) - - if not url: - logger.error("Ftrack URL is not set!") - return None - - url = url.strip('/ ') - - if 'http' not in url: - if url.endswith('ftrackapp.com'): - url = 'https://' + url - else: - url = 'https://{0}.ftrackapp.com'.format(url) - try: - result = requests.get(url, allow_redirects=False) - except requests.exceptions.RequestException: - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - logger.debug("Ftrack server {} is accessible.".format(url)) - - return url - - class SocketBaseEventHub(ftrack_api.event.hub.EventHub): hearbeat_msg = b"hearbeat" diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/ftrack/ftrack_server/socket_thread.py index f49ca5557e..3ef55f8daa 100644 --- a/openpype/modules/ftrack/ftrack_server/socket_thread.py +++ b/openpype/modules/ftrack/ftrack_server/socket_thread.py @@ -5,8 +5,8 @@ import socket import threading import traceback import subprocess -from openpype.api import Logger -from openpype.lib 
import get_openpype_execute_args + +from openpype.lib import get_openpype_execute_args, Logger class SocketThread(threading.Thread): @@ -16,7 +16,7 @@ class SocketThread(threading.Thread): def __init__(self, name, port, filepath, additional_args=[]): super(SocketThread, self).__init__() - self.log = Logger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.setName(name) self.name = name self.port = port diff --git a/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py b/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py index d5a95fad91..86ecffd5b8 100644 --- a/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py +++ b/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py @@ -1,7 +1,7 @@ import os import ftrack_api -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.lib import PostLaunchHook diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index f8883cefbd..935d1e85c9 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -14,11 +14,13 @@ from openpype.client import ( get_versions, get_representations ) -from openpype.api import ( - Logger, - get_anatomy_settings +from openpype.client.operations import ( + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, ) -from openpype.lib import ApplicationManager +from openpype.settings import get_anatomy_settings +from openpype.lib import ApplicationManager, Logger from openpype.pipeline import AvalonMongoDB, schema from .constants import CUST_ATTR_ID_KEY, FPS_KEYS @@ -32,14 +34,6 @@ import ftrack_api log = Logger.get_logger(__name__) -# Current schemas for avalon types -CURRENT_DOC_SCHEMAS = { - "project": "openpype:project-3.0", - "asset": "openpype:asset-3.0", - "config": "openpype:config-2.0" -} - - class InvalidFpsValue(Exception): pass @@ -2063,7 +2057,7 @@ class SyncEntitiesFactory: item["_id"] = new_id item["parent"] = self.avalon_project_id - item["schema"] = CURRENT_DOC_SCHEMAS["asset"] + item["schema"] = CURRENT_ASSET_DOC_SCHEMA item["data"]["visualParent"] = avalon_parent new_id_str = str(new_id) @@ -2198,8 +2192,8 @@ class SyncEntitiesFactory: project_item["_id"] = new_id project_item["parent"] = None - project_item["schema"] = CURRENT_DOC_SCHEMAS["project"] - project_item["config"]["schema"] = CURRENT_DOC_SCHEMAS["config"] + project_item["schema"] = CURRENT_PROJECT_SCHEMA + project_item["config"]["schema"] = CURRENT_PROJECT_CONFIG_SCHEMA self.ftrack_avalon_mapper[self.ft_project_id] = new_id self.avalon_ftrack_mapper[new_id] = self.ft_project_id diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/ftrack/lib/ftrack_base_handler.py index c0fad6aadc..c0b03f8a41 100644 --- a/openpype/modules/ftrack/lib/ftrack_base_handler.py +++ b/openpype/modules/ftrack/lib/ftrack_base_handler.py @@ -6,7 +6,7 @@ import uuid import datetime import traceback import time -from openpype.api import Logger +from openpype.lib import Logger from openpype.settings import get_project_settings import ftrack_api @@ -52,7 +52,7 @@ class BaseHandler(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.log = Logger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) if not( isinstance(session, ftrack_api.session.Session) or isinstance(session, ftrack_server.lib.SocketSession) diff --git 
a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py index 5758068f86..576a7d36c4 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -8,7 +8,7 @@ Provides: import pyblish.api from openpype.pipeline import legacy_io -from openpype.lib.plugin_tools import filter_profiles +from openpype.lib import filter_profiles class CollectFtrackFamily(pyblish.api.InstancePlugin): diff --git a/openpype/modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/ftrack/plugins/publish/collect_username.py index a9b746ea51..798f3960a8 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_username.py +++ b/openpype/modules/ftrack/plugins/publish/collect_username.py @@ -1,5 +1,8 @@ """Loads publishing context from json and continues in publish process. +Should run before 'CollectAnatomyContextData' so the user on context is +changed before it's stored to context anatomy data or instance anatomy data. + Requires: anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) @@ -13,7 +16,7 @@ import os import pyblish.api -class CollectUsername(pyblish.api.ContextPlugin): +class CollectUsernameForWebpublish(pyblish.api.ContextPlugin): """ Translates user email to Ftrack username. @@ -32,10 +35,8 @@ class CollectUsername(pyblish.api.ContextPlugin): hosts = ["webpublisher", "photoshop"] targets = ["remotepublish", "filespublish", "tvpaint_worker"] - _context = None - def process(self, context): - self.log.info("CollectUsername") + self.log.info("{}".format(self.__class__.__name__)) os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"] os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"] @@ -54,12 +55,14 @@ class CollectUsername(pyblish.api.ContextPlugin): return session = ftrack_api.Session(auto_connect_event_hub=False) - user = session.query("User where email like '{}'".format(user_email)) + user = session.query( + "User where email like '{}'".format(user_email) + ).first() if not user: raise ValueError( "Couldn't find user with {} email".format(user_email)) - user = user[0] + username = user.get("username") self.log.debug("Resolved ftrack username:: {}".format(username)) os.environ["FTRACK_API_USER"] = username @@ -67,5 +70,4 @@ class CollectUsername(pyblish.api.ContextPlugin): burnin_name = username if '@' in burnin_name: burnin_name = burnin_name[:burnin_name.index('@')] - os.environ["WEBPUBLISH_OPENPYPE_USERNAME"] = burnin_name context.data["user"] = burnin_name diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 20a69e060c..159e60024d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -13,6 +13,7 @@ Provides: import os import sys import collections + import six import pyblish.api import clique @@ -21,7 +22,7 @@ import clique class IntegrateFtrackApi(pyblish.api.InstancePlugin): """ Commit components to server. 
""" - order = pyblish.api.IntegratorOrder+0.499 + order = pyblish.api.IntegratorOrder + 0.499 label = "Integrate Ftrack Api" families = ["ftrack"] diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 1bf4caac77..53c6e69ac0 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -9,6 +9,7 @@ from openpype.lib.transcoding import ( convert_ffprobe_fps_to_float, ) from openpype.lib.profiles_filtering import filter_profiles +from openpype.lib.transcoding import VIDEO_EXTENSIONS class IntegrateFtrackInstance(pyblish.api.InstancePlugin): @@ -35,7 +36,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): family_mapping = { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", @@ -54,7 +55,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "reference": "reference" } keep_first_subset_name_for_review = True - asset_versions_status_profiles = {} + asset_versions_status_profiles = [] additional_metadata_keys = [] def process(self, instance): @@ -74,11 +75,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): version_number = int(instance_version) family = instance.data["family"] - family_low = family.lower() + # Perform case-insensitive family mapping + family_low = family.lower() asset_type = instance.data.get("ftrackFamily") - if not asset_type and family_low in self.family_mapping: - asset_type = self.family_mapping[family_low] + if not asset_type: + for map_family, map_value in self.family_mapping.items(): + if map_family.lower() == family_low: + asset_type = map_value + break if not asset_type: asset_type = "upload" @@ -86,15 +91,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): self.log.debug( "Family: {}\nMapping: {}".format(family_low, self.family_mapping) ) - - # Ignore this instance if neither "ftrackFamily" or a family mapping is - # found. - if not asset_type: - self.log.info(( - "Family \"{}\" does not match any asset type mapping" - ).format(family)) - return - status_name = self._get_asset_version_status_name(instance) # Base of component item data @@ -126,6 +122,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): review_representations = [] thumbnail_representations = [] other_representations = [] + has_movie_review = False for repre in instance_repres: self.log.debug("Representation {}".format(repre)) repre_tags = repre.get("tags") or [] @@ -134,6 +131,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): elif "ftrackreview" in repre_tags: review_representations.append(repre) + if self._is_repre_video(repre): + has_movie_review = True else: other_representations.append(repre) @@ -151,65 +150,53 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # TODO what if there is multiple thumbnails? first_thumbnail_component = None first_thumbnail_component_repre = None - for repre in thumbnail_representations: - repre_path = self._get_repre_path(instance, repre, False) - if not repre_path: - self.log.warning( - "Published path is not set and source was removed." + + if has_movie_review: + for repre in thumbnail_representations: + repre_path = self._get_repre_path(instance, repre, False) + if not repre_path: + self.log.warning( + "Published path is not set and source was removed." 
+ ) + continue + + # Create copy of base comp item and append it + thumbnail_item = copy.deepcopy(base_component_item) + thumbnail_item["component_path"] = repre_path + thumbnail_item["component_data"] = { + "name": "thumbnail" + } + thumbnail_item["thumbnail"] = True + + # Create copy of item before setting location + if "delete" not in repre.get("tags", []): + src_components_to_add.append(copy.deepcopy(thumbnail_item)) + # Create copy of first thumbnail + if first_thumbnail_component is None: + first_thumbnail_component_repre = repre + first_thumbnail_component = thumbnail_item + # Set location + thumbnail_item["component_location_name"] = ( + ftrack_server_location_name ) - continue - # Create copy of base comp item and append it - thumbnail_item = copy.deepcopy(base_component_item) - thumbnail_item["component_path"] = repre_path - thumbnail_item["component_data"] = { - "name": "thumbnail" - } - thumbnail_item["thumbnail"] = True - - # Create copy of item before setting location - src_components_to_add.append(copy.deepcopy(thumbnail_item)) - # Create copy of first thumbnail - if first_thumbnail_component is None: - first_thumbnail_component_repre = repre - first_thumbnail_component = thumbnail_item - # Set location - thumbnail_item["component_location_name"] = ( - ftrack_server_location_name - ) - - # Add item to component list - component_list.append(thumbnail_item) + # Add item to component list + component_list.append(thumbnail_item) if first_thumbnail_component is not None: - width = first_thumbnail_component_repre.get("width") - height = first_thumbnail_component_repre.get("height") - if not width or not height: - component_path = first_thumbnail_component["component_path"] - streams = [] - try: - streams = get_ffprobe_streams(component_path) - except Exception: - self.log.debug(( - "Failed to retrieve information about intput {}" - ).format(component_path)) + metadata = self._prepare_image_component_metadata( + first_thumbnail_component_repre, + first_thumbnail_component["component_path"] + ) - for stream in streams: - if "width" in stream and "height" in stream: - width = stream["width"] - height = stream["height"] - break - - if width and height: + if metadata: component_data = first_thumbnail_component["component_data"] - component_data["name"] = "ftrackreview-image" - component_data["metadata"] = { - "ftr_meta": json.dumps({ - "width": width, - "height": height, - "format": "image" - }) - } + component_data["metadata"] = metadata + + if review_representations: + component_data["name"] = "thumbnail" + else: + component_data["name"] = "ftrackreview-image" # Create review components # Change asset name of each new component for review @@ -218,6 +205,11 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): extended_asset_name = "" multiple_reviewable = len(review_representations) > 1 for repre in review_representations: + if not self._is_repre_video(repre) and has_movie_review: + self.log.debug("Movie repre has priority " + "from {}".format(repre)) + continue + repre_path = self._get_repre_path(instance, repre, False) if not repre_path: self.log.warning( @@ -266,12 +258,23 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Change location review_item["component_path"] = repre_path # Change component data - review_item["component_data"] = { - # Default component name is "main". 
- "name": "ftrackreview-mp4", - "metadata": self._prepare_component_metadata( + + if self._is_repre_video(repre): + component_name = "ftrackreview-mp4" + metadata = self._prepare_video_component_metadata( instance, repre, repre_path, True ) + else: + component_name = "ftrackreview-image" + metadata = self._prepare_image_component_metadata( + repre, repre_path + ) + review_item["thumbnail"] = True + + review_item["component_data"] = { + # Default component name is "main". + "name": component_name, + "metadata": metadata } if is_first_review_repre: @@ -281,7 +284,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): not_first_components.append(review_item) # Create copy of item before setting location - src_components_to_add.append(copy.deepcopy(review_item)) + if "delete" not in repre.get("tags", []): + src_components_to_add.append(copy.deepcopy(review_item)) # Set location review_item["component_location_name"] = ( @@ -427,7 +431,18 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): return matching_profile["status"] or None def _prepare_component_metadata( - self, instance, repre, component_path, is_review + self, instance, repre, component_path, is_review=None + ): + if self._is_repre_video(repre): + return self._prepare_video_component_metadata(instance, repre, + component_path, + is_review) + else: + return self._prepare_image_component_metadata(repre, + component_path) + + def _prepare_video_component_metadata( + self, instance, repre, component_path, is_review=None ): metadata = {} if "openpype_version" in self.additional_metadata_keys: @@ -439,9 +454,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): try: streams = get_ffprobe_streams(component_path) except Exception: - self.log.debug(( - "Failed to retrieve information about intput {}" - ).format(component_path)) + self.log.debug( + "Failed to retrieve information about " + "input {}".format(component_path)) # Find video streams video_streams = [ @@ -485,9 +500,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): input_framerate ) except ValueError: - self.log.warning(( - "Could not convert ffprobe fps to float \"{}\"" - ).format(input_framerate)) + self.log.warning( + "Could not convert ffprobe " + "fps to float \"{}\"".format(input_framerate)) continue stream_width = tmp_width @@ -559,3 +574,37 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "frameRate": float(fps) }) return metadata + + def _prepare_image_component_metadata(self, repre, component_path): + width = repre.get("width") + height = repre.get("height") + if not width or not height: + streams = [] + try: + streams = get_ffprobe_streams(component_path) + except Exception: + self.log.debug( + "Failed to retrieve information " + "about input {}".format(component_path)) + + for stream in streams: + if "width" in stream and "height" in stream: + width = stream["width"] + height = stream["height"] + break + + metadata = {} + if width and height: + metadata = { + "ftr_meta": json.dumps({ + "width": width, + "height": height, + "format": "image" + }) + } + + return metadata + + def _is_repre_video(self, repre): + repre_ext = ".{}".format(repre["ext"]) + return repre_ext in VIDEO_EXTENSIONS diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 8d39baa8d7..fa7a89050c 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ 
b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -156,8 +156,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # CUSTOM ATTRIBUTES custom_attributes = entity_data.get('custom_attributes', []) instances = [ - i for i in self.context if i.data['asset'] in entity['name'] + instance + for instance in self.context + if instance.data.get("asset") == entity["name"] ] + + for instance in instances: + instance.data["ftrackEntity"] = entity + for key in custom_attributes: hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical @@ -187,9 +193,6 @@ ) ) - for instance in instances: - instance.data['ftrackEntity'] = entity - try: self.session.commit() except Exception: @@ -199,13 +202,22 @@ six.reraise(tp, value, tb) # TASKS + instances_by_task_name = collections.defaultdict(list) + for instance in instances: + task_name = instance.data.get("task") + if task_name: + instances_by_task_name[task_name].append(instance) + tasks = entity_data.get('tasks', []) existing_tasks = [] tasks_to_create = [] for child in entity['children']: - if child.entity_type.lower() == 'task': - existing_tasks.append(child['name'].lower()) - # existing_tasks.append(child['type']['name']) + if child.entity_type.lower() == "task": + task_name_low = child["name"].lower() + existing_tasks.append(task_name_low) + + for instance in instances_by_task_name[task_name_low]: + instance.data["ftrackTask"] = child for task_name in tasks: task_type = tasks[task_name]["type"] @@ -215,12 +227,15 @@ tasks_to_create.append((task_name, task_type)) for task_name, task_type in tasks_to_create: - self.create_task( + task_entity = self.create_task( name=task_name, task_type=task_type, parent=entity ) + for instance in instances_by_task_name[task_name.lower()]: + instance.data["ftrackTask"] = task_entity + # Incoming links.
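Editor's note: the hunk above attaches ftrack task entities to publish instances by first grouping the instances by task name. A minimal standalone sketch of that grouping pattern, using plain dicts in place of pyblish instances and ftrack entities (all names illustrative); unlike the patch, this sketch lowercases the keys at insertion time so both lookups are case-insensitive:

import collections

instances = [
    {"task": "Modeling"},
    {"task": "rigging"},
    {},  # an instance without a task is skipped
]
instances_by_task_name = collections.defaultdict(list)
for instance in instances:
    task_name = instance.get("task")
    if task_name:
        instances_by_task_name[task_name.lower()].append(instance)

task_entity = {"name": "Modeling"}  # stand-in for a created ftrack task
for instance in instances_by_task_name[task_entity["name"].lower()]:
    instance["ftrackTask"] = task_entity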
self.create_links(project_name, entity_data, entity) try: diff --git a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py b/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py index dc80bf4eb3..489f291c0f 100644 --- a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py +++ b/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateFtrackAttributes(pyblish.api.InstancePlugin): @@ -34,7 +34,7 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin): """ label = "Validate Custom Ftrack Attributes" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["ftrack"] optional = True # Ignore standalone host, because it does not have an Ftrack entity diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py index 1a5da44432..78f9d135b7 100644 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py +++ b/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py @@ -13,10 +13,9 @@ import functools import itertools import distutils.version import hashlib -import tempfile +import appdirs import threading import atexit -import warnings import requests import requests.auth @@ -241,7 +240,7 @@ class Session(object): ) self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): + if auto_connect_event_hub is True: # Connect to event hub in background thread so as not to block main # session usage waiting for event hub connection. self._auto_connect_event_hub_thread = threading.Thread( @@ -252,9 +251,7 @@ class Session(object): # To help with migration from auto_connect_event_hub default changing # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) + self._event_hub._deprecation_warning_auto_connect = False # Register to auto-close session on exit. atexit.register(WeakMethod(self.close)) @@ -271,8 +268,9 @@ class Session(object): # rebuilding types)? 
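Editor's note: the schema-cache hunk below replaces the tempfile.gettempdir() default with appdirs' per-user cache directory while keeping the FTRACK_API_SCHEMA_CACHE_PATH environment override. A minimal sketch of the resulting resolution order (assumes the appdirs package is installed; the function name is illustrative):

import os
import appdirs

def resolve_schema_cache_path(schema_cache_path=None):
    # False means schema caching is disabled entirely
    if schema_cache_path is False:
        return None
    if schema_cache_path is None:
        # The environment variable wins; otherwise fall back to the
        # per-user cache directory appdirs picks for this platform
        schema_cache_path = os.environ.get(
            "FTRACK_API_SCHEMA_CACHE_PATH", appdirs.user_cache_dir()
        )
    return schema_cache_path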
if schema_cache_path is not False: if schema_cache_path is None: + schema_cache_path = appdirs.user_cache_dir() schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + 'FTRACK_API_SCHEMA_CACHE_PATH', schema_cache_path ) schema_cache_path = os.path.join( diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/ftrack/scripts/sub_event_processor.py index d1e2e3aaeb..a5ce0511b8 100644 --- a/openpype/modules/ftrack/scripts/sub_event_processor.py +++ b/openpype/modules/ftrack/scripts/sub_event_processor.py @@ -4,6 +4,8 @@ import signal import socket import datetime +import ftrack_api + from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -12,17 +14,12 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype.modules import ModulesManager -from openpype.api import Logger from openpype.lib import ( + Logger, get_openpype_version, get_build_version ) - -import ftrack_api - -log = Logger().get_logger("Event processor") - subprocess_started = datetime.datetime.now() @@ -68,6 +65,8 @@ def register(session): def main(args): + log = Logger.get_logger("Event processor") + port = int(args[-1]) # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index 3163642e3f..6c7ecb8351 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -15,8 +15,8 @@ from openpype_modules.ftrack.ftrack_server.lib import ( TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) -from openpype.api import Logger from openpype.lib import ( + Logger, is_current_version_studio_latest, is_running_from_build, get_expected_version, diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py index 204cce89e8..a7e77951af 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/ftrack/scripts/sub_event_storer.py @@ -17,10 +17,10 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import ( + Logger, get_openpype_version, get_build_version ) -from openpype.api import Logger log = Logger.get_logger("Event storer") subprocess_started = datetime.datetime.now() diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/ftrack/scripts/sub_legacy_server.py index e3a623c376..1f0fc1b369 100644 --- a/openpype/modules/ftrack/scripts/sub_legacy_server.py +++ b/openpype/modules/ftrack/scripts/sub_legacy_server.py @@ -5,11 +5,11 @@ import signal import threading import ftrack_api -from openpype.api import Logger +from openpype.lib import Logger from openpype.modules import ModulesManager from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer -log = Logger().get_logger("Event Server Legacy") +log = Logger.get_logger("Event Server Legacy") class TimerChecker(threading.Thread): diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py b/openpype/modules/ftrack/scripts/sub_user_server.py index a3701a0950..930a2d51e2 100644 --- a/openpype/modules/ftrack/scripts/sub_user_server.py +++ b/openpype/modules/ftrack/scripts/sub_user_server.py @@ -2,6 +2,7 @@ import sys import signal import socket +from openpype.lib import Logger from 
openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -9,9 +10,7 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype.modules import ModulesManager -from openpype.api import Logger - -log = Logger().get_logger("FtrackUserServer") +log = Logger.get_logger("FtrackUserServer") def main(args): diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 2919ae22fb..e3c6e30ead 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -6,21 +6,18 @@ import threading from Qt import QtCore, QtWidgets, QtGui import ftrack_api -from ..ftrack_server.lib import check_ftrack_url -from ..ftrack_server import socket_thread -from ..lib import credentials -from ..ftrack_module import FTRACK_MODULE_DIR +from openpype import resources +from openpype.lib import Logger +from openpype_modules.ftrack import resolve_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.lib import credentials from . import login_dialog -from openpype.api import Logger, resources - - -log = Logger().get_logger("FtrackModule") - class FtrackTrayWrapper: def __init__(self, module): self.module = module + self.log = Logger.get_logger(self.__class__.__name__) self.thread_action_server = None self.thread_socket_server = None @@ -61,19 +58,19 @@ class FtrackTrayWrapper: if validation: self.widget_login.set_credentials(ft_user, ft_api_key) self.module.set_credentials_to_env(ft_user, ft_api_key) - log.info("Connected to Ftrack successfully") + self.log.info("Connected to Ftrack successfully") self.on_login_change() return validation if not validation and ft_user and ft_api_key: - log.warning( + self.log.warning( "Current Ftrack credentials are not valid. 
{}: {} - {}".format( str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key ) ) - log.info("Please sign in to Ftrack") + self.log.info("Please sign in to Ftrack") self.bool_logged = False self.show_login_widget() self.set_menu_visibility() @@ -103,7 +100,7 @@ class FtrackTrayWrapper: self.action_credentials.setIcon(self.icon_not_logged) self.action_credentials.setToolTip("Logged out") - log.info("Logged out of Ftrack") + self.log.info("Logged out of Ftrack") self.bool_logged = False self.set_menu_visibility() @@ -125,10 +122,6 @@ class FtrackTrayWrapper: ftrack_url = self.module.ftrack_url os.environ["FTRACK_SERVER"] = ftrack_url - parent_file_path = os.path.dirname( - os.path.dirname(os.path.realpath(__file__)) - ) - min_fail_seconds = 5 max_fail_count = 3 wait_time_after_max_fail = 10 @@ -153,17 +146,19 @@ class FtrackTrayWrapper: # Main loop while True: if not self.bool_action_server_running: - log.debug("Action server was pushed to stop.") + self.log.debug("Action server was pushed to stop.") break # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible: if not printed_ftrack_error: - log.warning("Can't access Ftrack {}".format(ftrack_url)) + self.log.warning( + "Can't access Ftrack {}".format(ftrack_url) + ) if self.thread_socket_server is not None: self.thread_socket_server.stop() @@ -190,7 +185,7 @@ class FtrackTrayWrapper: self.set_menu_visibility() elif failed_count == max_fail_count: - log.warning(( + self.log.warning(( "Action server failed {} times." " I'll try to run again {}s later" ).format( @@ -242,10 +237,10 @@ class FtrackTrayWrapper: self.thread_action_server.join() self.thread_action_server = None - log.info("Ftrack action server was forced to stop") + self.log.info("Ftrack action server was forced to stop") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing action server", exc_info=True ) @@ -342,7 +337,7 @@ class FtrackTrayWrapper: self.thread_timer = None except Exception as e: - log.error("During Killing Timer event server: {0}".format(e)) + self.log.error("During Killing Timer event server: {0}".format(e)) def changed_user(self): self.stop_action_server() diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 14f49204ee..f92ec6bf2d 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -1,8 +1,33 @@ -from abc import abstractmethod, abstractproperty +from abc import ABCMeta, abstractmethod, abstractproperty + +import six from openpype import resources -from openpype.modules import OpenPypeInterface + +class _OpenPypeInterfaceMeta(ABCMeta): + """OpenPypeInterface meta class to print proper string.""" + + def __str__(self): + return "<'OpenPypeInterface.{}'>".format(self.__name__) + + def __repr__(self): + return str(self) + + +@six.add_metaclass(_OpenPypeInterfaceMeta) +class OpenPypeInterface: + """Base class of Interface that can be used as Mixin with abstract parts. + + This is way how OpenPype module or addon can tell OpenPype that contain + implementation for specific functionality. + + Child classes of OpenPypeInterface may be used as mixin in different + OpenPype modules which means they have to have implemented methods defined + in the interface. By default interface does not have any abstract parts. 
+ """ + + pass class IPluginPaths(OpenPypeInterface): @@ -56,6 +81,13 @@ class ILaunchHookPaths(OpenPypeInterface): Expected result is list of paths. ["path/to/launch_hooks_dir"] + + Deprecated: + This interface is not needed since OpenPype 3.14.*. Addon just have to + implement 'get_launch_hook_paths' which can expect Application object + or nothing as argument. + + Interface class will be removed after 3.16.*. """ @abstractmethod @@ -353,8 +385,8 @@ class ISettingsChangeListener(OpenPypeInterface): pass -class IHostModule(OpenPypeInterface): - """Module which also contain a host implementation.""" +class IHostAddon(OpenPypeInterface): + """Addon which also contain a host implementation.""" @abstractproperty def host_name(self): diff --git a/openpype/modules/job_queue/module.py b/openpype/modules/job_queue/module.py index f1d7251e85..7075fcea14 100644 --- a/openpype/modules/job_queue/module.py +++ b/openpype/modules/job_queue/module.py @@ -43,7 +43,7 @@ import platform import click from openpype.modules import OpenPypeModule -from openpype.api import get_system_settings +from openpype.settings import get_system_settings class JobQueueModule(OpenPypeModule): diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py new file mode 100644 index 0000000000..c95079e042 --- /dev/null +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -0,0 +1,125 @@ +import webbrowser + +from openpype.pipeline import LauncherAction +from openpype.modules import ModulesManager +from openpype.client import get_project, get_asset_by_name + + +class ShowInKitsu(LauncherAction): + name = "showinkitsu" + label = "Show in Kitsu" + icon = "external-link-square" + color = "#e0e1e1" + order = 10 + + @staticmethod + def get_kitsu_module(): + return ModulesManager().modules_by_name.get("kitsu") + + def is_compatible(self, session): + if not session.get("AVALON_PROJECT"): + return False + + return True + + def process(self, session, **kwargs): + + # Context inputs + project_name = session["AVALON_PROJECT"] + asset_name = session.get("AVALON_ASSET", None) + task_name = session.get("AVALON_TASK", None) + + project = get_project(project_name=project_name, + fields=["data.zou_id"]) + if not project: + raise RuntimeError(f"Project {project_name} not found.") + + project_zou_id = project["data"].get("zou_id") + if not project_zou_id: + raise RuntimeError(f"Project {project_name} has no " + f"connected kitsu id.") + + asset_zou_name = None + asset_zou_id = None + asset_zou_type = 'Assets' + task_zou_id = None + zou_sub_type = ['AssetType', 'Sequence'] + if asset_name: + asset_zou_name = asset_name + asset_fields = ["data.zou.id", "data.zou.type"] + if task_name: + asset_fields.append(f"data.tasks.{task_name}.zou.id") + + asset = get_asset_by_name(project_name, + asset_name=asset_name, + fields=asset_fields) + + asset_zou_data = asset["data"].get("zou") + + if asset_zou_data: + asset_zou_type = asset_zou_data["type"] + if asset_zou_type not in zou_sub_type: + asset_zou_id = asset_zou_data["id"] + else: + asset_zou_type = asset_name + + if task_name: + task_data = asset["data"]["tasks"][task_name] + task_zou_data = task_data.get("zou", {}) + if not task_zou_data: + self.log.debug(f"No zou task data for task: {task_name}") + task_zou_id = task_zou_data["id"] + + # Define URL + url = self.get_url(project_id=project_zou_id, + asset_name=asset_zou_name, + asset_id=asset_zou_id, + asset_type=asset_zou_type, + task_id=task_zou_id) + + # Open URL in 
webbrowser + self.log.info(f"Opening URL: {url}") + webbrowser.open(url, + # Try in new tab + new=2) + + def get_url(self, + project_id, + asset_name=None, + asset_id=None, + asset_type=None, + task_id=None): + + shots_url = {'Shots', 'Sequence', 'Shot'} + sub_type = {'AssetType', 'Sequence'} + kitsu_module = self.get_kitsu_module() + + # Get kitsu url with /api stripped + kitsu_url = kitsu_module.server_url + if kitsu_url.endswith("/api"): + kitsu_url = kitsu_url[:-len("/api")] + + sub_url = f"/productions/{project_id}" + asset_type_url = "Shots" if asset_type in shots_url else "Assets" + + if task_id: + # Go to task page + # /productions/{project-id}/{asset_type}/tasks/{task_id} + sub_url += f"/{asset_type_url}/tasks/{task_id}" + + elif asset_id: + # Go to asset or shot page + # /productions/{project-id}/assets/{entity_id} + # /productions/{project-id}/shots/{entity_id} + sub_url += f"/{asset_type_url}/{asset_id}" + + else: + # Go to project page + # Project page must end with a view + # /productions/{project-id}/assets/ + # Add search method if is a sub_type + sub_url += f"/{asset_type_url}" + if asset_type in sub_type: + sub_url += f'?search={asset_name}' + + return f"{kitsu_url}{sub_url}" diff --git a/openpype/modules/kitsu/kitsu_module.py b/openpype/modules/kitsu/kitsu_module.py index d19d14dda7..23c032715b 100644 --- a/openpype/modules/kitsu/kitsu_module.py +++ b/openpype/modules/kitsu/kitsu_module.py @@ -89,7 +89,10 @@ class KitsuModule(OpenPypeModule, IPluginPaths, ITrayAction): """Implementation of abstract method for `IPluginPaths`.""" current_dir = os.path.dirname(os.path.abspath(__file__)) - return {"publish": [os.path.join(current_dir, "plugins", "publish")]} + return { + "publish": [os.path.join(current_dir, "plugins", "publish")], + "actions": [os.path.join(current_dir, "actions")] + } def cli(self, click_group): click_group.add_command(cli_main) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index 0529380d6d..adcfb07cd5 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -5,6 +5,7 @@ from typing import Tuple import gazu from openpype.lib.local_settings import OpenPypeSecureRegistry +from openpype.lib import emit_event def validate_credentials( @@ -32,6 +33,8 @@ def validate_credentials( except gazu.exception.AuthFailedException: return False + emit_event("kitsu.user.logged", data={"username": login}, source="kitsu") + return True diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index e03cf2b30e..2d14b38bc4 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -15,12 +15,15 @@ from openpype.client import ( get_assets, get_asset_by_id, get_asset_by_name, + create_project, ) from openpype.pipeline import AvalonMongoDB -from openpype.api import get_project_settings -from openpype.lib import create_project +from openpype.settings import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials +from openpype.lib import Logger + +log = Logger.get_logger(__name__) # Accepted namin pattern for OP naming_pattern = re.compile("^[a-zA-Z0-9_.]*$") @@ -112,7 +115,9 @@ def update_op_assets( item_data["frameStart"] = frame_in # Frames duration, fallback on 0 try: - frames_duration = int(item_data.pop("nb_frames", 0)) + # NOTE nb_frames is stored directly in item + # because of zou's legacy design + 
frames_duration = int(item.get("nb_frames", 0)) except (TypeError, ValueError): frames_duration = 0 # Frame out, fallback on frame_in + duration or project's value or 1001 @@ -166,50 +171,21 @@ def update_op_assets( # Substitute item type for general classification (assets or shots) if item_type in ["Asset", "AssetType"]: - substitute_item_type = "assets" - elif item_type in ["Episode", "Sequence"]: - substitute_item_type = "shots" - else: - substitute_item_type = f"{item_type.lower()}s" - entity_parent_folders = [ - f - for f in project_module_settings["entities_root"] - .get(substitute_item_type) - .split("/") - if f - ] + entity_root_asset_name = "Assets" + elif item_type in ["Episode", "Sequence", "Shot"]: + entity_root_asset_name = "Shots" # Root parent folder if exist visual_parent_doc_id = ( asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None ) if visual_parent_doc_id is None: - # Find root folder docs - root_folder_docs = get_assets( + # Find root folder doc ("Assets" or "Shots") + root_folder_doc = get_asset_by_name( project_name, - asset_names=[entity_parent_folders[-1]], + asset_name=entity_root_asset_name, fields=["_id", "data.root_of"], ) - # NOTE: Not sure why it's checking for entity type? - # OP3 does not support multiple assets with same names so type - # filtering is irelevant. - # This way mimics previous implementation: - # ``` - # root_folder_doc = dbcon.find_one( - # { - # "type": "asset", - # "name": entity_parent_folders[-1], - # "data.root_of": substitute_item_type, - # }, - # ["_id"], - # ) - # ``` - root_folder_doc = None - for folder_doc in root_folder_docs: - root_of = folder_doc.get("data", {}).get("root_of") - if root_of == substitute_item_type: - root_folder_doc = folder_doc - break if root_folder_doc: visual_parent_doc_id = root_folder_doc["_id"] @@ -240,7 +216,7 @@ def update_op_assets( item_name = item["name"] # Set root folders parents - item_data["parents"] = entity_parent_folders + item_data["parents"] + item_data["parents"] = [entity_root_asset_name] + item_data["parents"] # Update 'data' different in zou DB updated_data = { @@ -259,7 +235,6 @@ def update_op_assets( }, ) ) - return assets_with_update @@ -277,8 +252,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_name = project["name"] project_doc = get_project(project_name) if not project_doc: - print(f"Creating project '{project_name}'") - project_doc = create_project(project_name, project_name, dbcon=dbcon) + log.info(f"Creating project '{project_name}'") + project_doc = create_project(project_name, project_name) # Project data and tasks project_data = project_doc["data"] or {} @@ -297,12 +272,20 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: { "code": project_code, "fps": float(project["fps"]), - "resolutionWidth": int(project["resolution"].split("x")[0]), - "resolutionHeight": int(project["resolution"].split("x")[1]), "zou_id": project["id"], } ) + match_res = re.match(r"(\d+)x(\d+)", project["resolution"]) + if match_res: + project_data["resolutionWidth"] = int(match_res.group(1)) + project_data["resolutionHeight"] = int(match_res.group(2)) + else: + log.warning( + f"'{project['resolution']}' does not match the expected" + " format for the resolution, for example: 1920x1080" + ) + return UpdateOne( {"_id": project_doc["_id"]}, { @@ -318,13 +301,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) -def sync_all_projects(login: str, password: str): +def sync_all_projects(login: str, 
password: str, ignore_projects: list = None): """Update all OP projects in DB with Zou data. Args: login (str): Kitsu user login password (str): Kitsu user password - + ignore_projects (list): List of unsynced project names Raises: gazu.exception.AuthFailedException: Wrong user login and/or password """ @@ -340,6 +323,8 @@ def sync_all_projects(login: str, password: str): dbcon.install() all_projects = gazu.project.all_open_projects() for project in all_projects: + if ignore_projects and project["name"] in ignore_projects: + continue sync_project_from_kitsu(dbcon, project) @@ -361,7 +346,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): if not project: project = gazu.project.get_project_by_name(project["name"]) - print(f"Synchronizing {project['name']}...") + log.info(f"Synchronizing {project['name']}...") # Get all assets from zou all_assets = gazu.asset.all_assets_for_project(project) @@ -396,54 +381,30 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): zou_ids_and_asset_docs[project["id"]] = project_doc # Create entities root folders - project_module_settings = get_project_settings(project_name)["kitsu"] - for entity_type, root in project_module_settings["entities_root"].items(): - parent_folders = root.split("/") - direct_parent_doc = None - for i, folder in enumerate(parent_folders, 1): - parent_doc = get_asset_by_name( - project_name, folder, fields=["_id", "data.root_of"] - ) - # NOTE: Not sure why it's checking for entity type? - # OP3 does not support multiple assets with same names so type - # filtering is irelevant. - # Also all of the entities could find be queried at once using - # 'get_assets'. - # This way mimics previous implementation: - # ``` - # parent_doc = dbcon.find_one( - # {"type": "asset", "name": folder, "data.root_of": entity_type} - # ) - # ``` - if ( - parent_doc - and parent_doc.get("data", {}).get("root_of") != entity_type - ): - parent_doc = None - - if not parent_doc: - direct_parent_doc = dbcon.insert_one( - { - "name": folder, - "type": "asset", - "schema": "openpype:asset-3.0", - "data": { - "root_of": entity_type, - "parents": parent_folders[:i], - "visualParent": direct_parent_doc.inserted_id - if direct_parent_doc - else None, - "tasks": {}, - }, - } - ) + to_insert = [ + { + "name": r, + "type": "asset", + "schema": "openpype:asset-3.0", + "data": { + "root_of": r, + "tasks": {}, + }, + } + for r in ["Assets", "Shots"] + if not get_asset_by_name( + project_name, r, fields=["_id", "data.root_of"] + ) + ] # Create - to_insert = [ - create_op_asset(item) - for item in all_entities - if item["id"] not in zou_ids_and_asset_docs.keys() - ] + to_insert.extend( + [ + create_op_asset(item) + for item in all_entities + if item["id"] not in zou_ids_and_asset_docs.keys() + ] + ) if to_insert: # Insert doc in DB dbcon.insert_many(to_insert) diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index da924aa5ee..39baf31b93 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -12,7 +12,7 @@ from openpype.client import ( get_assets, ) from openpype.pipeline import AvalonMongoDB -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index 14be6b392e..da1628b71f 100644 --- 
a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -1,4 +1,3 @@ -from openpype.api import Logger from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/log_viewer/tray/models.py index aea62c381b..d369ffeb64 100644 --- a/openpype/modules/log_viewer/tray/models.py +++ b/openpype/modules/log_viewer/tray/models.py @@ -1,6 +1,6 @@ import collections from Qt import QtCore, QtGui -from openpype.lib.log import PypeLogger +from openpype.lib import Logger class LogModel(QtGui.QStandardItemModel): @@ -41,14 +41,14 @@ class LogModel(QtGui.QStandardItemModel): self.dbcon = None # Crash if connection is not possible to skip this module - if not PypeLogger.initialized: - PypeLogger.initialize() + if not Logger.initialized: + Logger.initialize() - connection = PypeLogger.get_log_mongo_connection() + connection = Logger.get_log_mongo_connection() if connection: - PypeLogger.bootstrap_mongo_log() - database = connection[PypeLogger.log_database_name] - self.dbcon = database[PypeLogger.log_collection_name] + Logger.bootstrap_mongo_log() + database = connection[Logger.log_database_name] + self.dbcon = database[Logger.log_collection_name] def headerData(self, section, orientation, role): if ( diff --git a/openpype/modules/royalrender/api.py b/openpype/modules/royalrender/api.py index ed9e71f240..de1dba8724 100644 --- a/openpype/modules/royalrender/api.py +++ b/openpype/modules/royalrender/api.py @@ -5,13 +5,10 @@ import os from openpype.settings import get_project_settings from openpype.lib.local_settings import OpenPypeSettingsRegistry -from openpype.lib import PypeLogger, run_subprocess +from openpype.lib import Logger, run_subprocess from .rr_job import RRJob, SubmitFile, SubmitterParameter -log = PypeLogger.get_logger("RoyalRender") - - class Api: _settings = None @@ -19,6 +16,7 @@ class Api: RR_SUBMIT_API = 2 def __init__(self, settings, project=None): + self.log = Logger.get_logger("RoyalRender") self._settings = settings self._initialize_rr(project) @@ -137,7 +135,7 @@ class Api: rr_console += ".exe" args = [rr_console, file] - run_subprocess(" ".join(args), logger=log) + run_subprocess(" ".join(args), logger=self.log) def _submit_using_api(self, file): # type: (SubmitFile) -> None @@ -159,11 +157,11 @@ class Api: rr_server = tcp.getRRServer() if len(rr_server) == 0: - log.info("Got RR IP address {}".format(rr_server)) + self.log.info("Got RR IP address {}".format(rr_server)) # TODO: Port is hardcoded in RR? 
If not, move it to Settings if not tcp.setServer(rr_server, 7773): - log.error( + self.log.error( "Can not set RR server: {}".format(tcp.errorMessage())) raise RoyalRenderException(tcp.errorMessage()) diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 2b4b51e3ad..1e7eca4dec 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -23,6 +23,11 @@ class SettingsAction(OpenPypeModule, ITrayAction): """Initialization in tray implementation of ITrayAction.""" self.create_settings_window() + def tray_exit(self): + # Close settings UI to remove settings lock + if self.settings_window: + self.settings_window.close() + def on_action_trigger(self): """Implementation for action trigger of ITrayAction.""" self.show_settings_window() diff --git a/openpype/modules/shotgrid/lib/settings.py b/openpype/modules/shotgrid/lib/settings.py index 924099f04b..5b0b728f55 100644 --- a/openpype/modules/shotgrid/lib/settings.py +++ b/openpype/modules/shotgrid/lib/settings.py @@ -1,4 +1,4 @@ -from openpype.api import get_system_settings, get_project_settings +from openpype.settings import get_system_settings, get_project_settings from openpype.modules.shotgrid.lib.const import MODULE_NAME diff --git a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py index c14c980e2a..48b320e15e 100644 --- a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py +++ b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateShotgridUser(pyblish.api.ContextPlugin): @@ -8,7 +8,7 @@ class ValidateShotgridUser(pyblish.api.ContextPlugin): """ label = "Validate Shotgrid User" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, context): sg = context.data.get("shotgridSession") diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py index 5644f0c35f..281c6fdcad 100644 --- a/openpype/modules/shotgrid/shotgrid_module.py +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -3,7 +3,6 @@ import os from openpype_interfaces import ( ITrayModule, IPluginPaths, - ILaunchHookPaths, ) from openpype.modules import OpenPypeModule @@ -11,9 +10,7 @@ from openpype.modules import OpenPypeModule SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class ShotgridModule( - OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths -): +class ShotgridModule(OpenPypeModule, ITrayModule, IPluginPaths): leecher_manager_url = None name = "shotgrid" enabled = False diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index c3b288f0cd..643e55915b 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -95,13 +95,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): Reviews might be large, so allow only adding link to message instead of uploading only. 
""" + fill_data = copy.deepcopy(instance.context.data["anatomyData"]) + username = fill_data.get("user") fill_pairs = [ ("asset", instance.data.get("asset", fill_data.get("asset"))), ("subset", instance.data.get("subset", fill_data.get("subset"))), - ("username", instance.data.get("username", - fill_data.get("username"))), + ("user", username), + ("username", username), ("app", instance.data.get("app", fill_data.get("app"))), ("family", instance.data.get("family", fill_data.get("family"))), ("version", str(instance.data.get("version", @@ -110,13 +112,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if review_path: fill_pairs.append(("review_filepath", review_path)) - task_data = instance.data.get("task") - if not task_data: - task_data = fill_data.get("task") - for key, value in task_data.items(): - fill_key = "task[{}]".format(key) - fill_pairs.append((fill_key, value)) - fill_pairs.append(("task", task_data["name"])) + task_data = fill_data.get("task") + if task_data: + if ( + "{task}" in message_templ + or "{Task}" in message_templ + or "{TASK}" in message_templ + ): + fill_pairs.append(("task", task_data["name"])) + + else: + for key, value in task_data.items(): + fill_key = "task[{}]".format(key) + fill_pairs.append((fill_key, value)) self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py index 9b2976d766..499c1c19ce 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/slack/slack_module.py @@ -1,14 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype_interfaces import ( - IPluginPaths, - ILaunchHookPaths -) +from openpype.modules.interfaces import IPluginPaths SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): +class SlackIntegrationModule(OpenPypeModule, IPluginPaths): """Allows sending notification to Slack channels during publishing.""" name = "slack" @@ -18,7 +15,8 @@ class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): self.enabled = slack_settings["enabled"] def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" + return os.path.join(SLACK_MODULE_DIR, "launch_hooks") def get_plugin_paths(self): diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 8c2fe1cad9..e11a8ba71e 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -1,8 +1,8 @@ import abc import six -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") @six.add_metaclass(abc.ABCMeta) @@ -10,6 +10,8 @@ class AbstractProvider: CODE = '' LABEL = '' + _log = None + def __init__(self, project_name, site_name, tree=None, presets=None): self.presets = None self.active = False @@ -19,6 +21,12 @@ class AbstractProvider: super(AbstractProvider, self).__init__() + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @abc.abstractmethod def is_active(self): """ @@ -199,11 +207,11 @@ class AbstractProvider: path = anatomy.fill_root(path) except KeyError: msg = "Error in resolving local root from anatomy" 
- log.error(msg) + self.log.error(msg) raise ValueError(msg) except IndexError: msg = "Path {} contains unfillable placeholder" - log.error(msg) + self.log.error(msg) raise ValueError(msg) return path diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index 89d6990841..e026ae7ef6 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -2,12 +2,9 @@ import os import dropbox -from openpype.api import Logger from .abstract_provider import AbstractProvider from ..utils import EditableScopes -log = Logger().get_logger("SyncServer") - class DropboxHandler(AbstractProvider): CODE = 'dropbox' @@ -20,26 +17,26 @@ class DropboxHandler(AbstractProvider): self.dbx = None if not self.presets: - log.info( + self.log.info( "Sync Server: There are no presets for {}.".format(site_name) ) return if not self.presets["enabled"]: - log.debug("Sync Server: Site {} not enabled for {}.". + self.log.debug("Sync Server: Site {} not enabled for {}.". format(site_name, project_name)) return token = self.presets.get("token", "") if not token: msg = "Sync Server: No access token for dropbox provider" - log.info(msg) + self.log.info(msg) return team_folder_name = self.presets.get("team_folder_name", "") if not team_folder_name: msg = "Sync Server: No team folder name for dropbox provider" - log.info(msg) + self.log.info(msg) return acting_as_member = self.presets.get("acting_as_member", "") @@ -47,7 +44,7 @@ class DropboxHandler(AbstractProvider): msg = ( "Sync Server: No acting member for dropbox provider" ) - log.info(msg) + self.log.info(msg) return try: @@ -55,7 +52,7 @@ class DropboxHandler(AbstractProvider): token, acting_as_member, team_folder_name ) except Exception as e: - log.info("Could not establish dropbox object: {}".format(e)) + self.log.info("Could not establish dropbox object: {}".format(e)) return super(AbstractProvider, self).__init__() @@ -448,7 +445,7 @@ class DropboxHandler(AbstractProvider): path = anatomy.fill_root(path) except KeyError: msg = "Error in resolving local root from anatomy" - log.error(msg) + self.log.error(msg) raise ValueError(msg) return path diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index bef707788b..9a3ce89cf5 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -5,12 +5,12 @@ import sys import six import platform -from openpype.api import Logger -from openpype.api import get_system_settings +from openpype.lib import Logger +from openpype.settings import get_system_settings from .abstract_provider import AbstractProvider from ..utils import time_function, ResumableError -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("GDriveHandler") try: from googleapiclient.discovery import build @@ -69,13 +69,17 @@ class GDriveHandler(AbstractProvider): self.presets = presets if not self.presets: - log.info("Sync Server: There are no presets for {}.". - format(site_name)) + self.log.info( + "Sync Server: There are no presets for {}.".format(site_name) + ) return if not self.presets["enabled"]: - log.debug("Sync Server: Site {} not enabled for {}.". 
- format(site_name, project_name)) + self.log.debug( + "Sync Server: Site {} not enabled for {}.".format( + site_name, project_name + ) + ) return current_platform = platform.system().lower() @@ -85,20 +89,22 @@ class GDriveHandler(AbstractProvider): if not cred_path: msg = "Sync Server: Please, fill the credentials for gdrive "\ "provider for platform '{}' !".format(current_platform) - log.info(msg) + self.log.info(msg) return try: cred_path = cred_path.format(**os.environ) except KeyError as e: - log.info("Sync Server: The key(s) {} does not exist in the " - "environment variables".format(" ".join(e.args))) + self.log.info(( + "Sync Server: The key(s) {} does not exist in the " + "environment variables" + ).format(" ".join(e.args))) return if not os.path.exists(cred_path): msg = "Sync Server: No credentials for gdrive provider " + \ "for '{}' on path '{}'!".format(site_name, cred_path) - log.info(msg) + self.log.info(msg) return self.service = None @@ -318,7 +324,7 @@ class GDriveHandler(AbstractProvider): fields='id') media.stream() - log.debug("Start Upload! {}".format(source_path)) + self.log.debug("Start Upload! {}".format(source_path)) last_tick = status = response = None status_val = 0 while response is None: @@ -331,7 +337,7 @@ class GDriveHandler(AbstractProvider): if not last_tick or \ time.time() - last_tick >= server.LOG_PROGRESS_SEC: last_tick = time.time() - log.debug("Uploaded %d%%." % + self.log.debug("Uploaded %d%%." % int(status_val * 100)) server.update_db(project_name=project_name, new_file_id=None, @@ -350,8 +356,9 @@ class GDriveHandler(AbstractProvider): if 'has not granted' in ex._get_reason().strip(): raise PermissionError(ex._get_reason().strip()) - log.warning("Forbidden received, hit quota. " - "Injecting 60s delay.") + self.log.warning( + "Forbidden received, hit quota. Injecting 60s delay." + ) time.sleep(60) return False raise @@ -417,7 +424,7 @@ class GDriveHandler(AbstractProvider): if not last_tick or \ time.time() - last_tick >= server.LOG_PROGRESS_SEC: last_tick = time.time() - log.debug("Downloaded %d%%." % + self.log.debug("Downloaded %d%%." % int(status_val * 100)) server.update_db(project_name=project_name, new_file_id=None, @@ -629,9 +636,9 @@ class GDriveHandler(AbstractProvider): ["gdrive"] ) except KeyError: - log.info(("Sync Server: There are no presets for Gdrive " + - "provider."). - format(str(provider_presets))) + log.info(( + "Sync Server: There are no presets for Gdrive provider." + ).format(str(provider_presets))) return return provider_presets @@ -704,7 +711,7 @@ class GDriveHandler(AbstractProvider): roots[self.MY_DRIVE_STR] = self.service.files() \ .get(fileId='root').execute() except errors.HttpError: - log.warning("HttpError in sync loop, " + self.log.warning("HttpError in sync loop, " "trying next loop", exc_info=True) raise ResumableError @@ -727,7 +734,7 @@ class GDriveHandler(AbstractProvider): Returns: (dictionary) path as a key, folder id as a value """ - log.debug("build_tree len {}".format(len(folders))) + self.log.debug("build_tree len {}".format(len(folders))) if not self.root: # build only when necessary, could be expensive self.root = self._prepare_root_info() @@ -779,9 +786,9 @@ class GDriveHandler(AbstractProvider): loop_cnt += 1 if len(no_parents_yet) > 0: - log.debug("Some folders path are not resolved {}". + self.log.debug("Some folders path are not resolved {}". 
format(no_parents_yet)) - log.debug("Remove deleted folders from trash.") + self.log.debug("Remove deleted folders from trash.") return tree diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 01bc891d08..8f55dc529b 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -4,7 +4,7 @@ import shutil import threading import time -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import Anatomy from .abstract_provider import AbstractProvider diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 302ffae3e6..40f11cb9dd 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -4,10 +4,10 @@ import time import threading import platform -from openpype.api import Logger -from openpype.api import get_system_settings +from openpype.lib import Logger +from openpype.settings import get_system_settings from .abstract_provider import AbstractProvider -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer-SFTPHandler") pysftp = None try: @@ -43,8 +43,9 @@ class SFTPHandler(AbstractProvider): self.presets = presets if not self.presets: - log.warning("Sync Server: There are no presets for {}.". - format(site_name)) + self.log.warning( + "Sync Server: There are no presets for {}.".format(site_name) + ) return # store to instance for reconnect @@ -423,7 +424,7 @@ class SFTPHandler(AbstractProvider): return pysftp.Connection(**conn_params) except (paramiko.ssh_exception.SSHException, pysftp.exceptions.ConnectionException): - log.warning("Couldn't connect", exc_info=True) + self.log.warning("Couldn't connect", exc_info=True) def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): @@ -445,7 +446,7 @@ class SFTPHandler(AbstractProvider): time.time() - last_tick >= server.LOG_PROGRESS_SEC: status_val = target_file_size / source_file_size last_tick = time.time() - log.debug(direction + "ed %d%%." % int(status_val * 100)) + self.log.debug(direction + "ed %d%%." % int(status_val * 100)) server.update_db(project_name=project_name, new_file_id=None, file=file, diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 97538fcd4e..8b11055e65 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -6,14 +6,11 @@ import concurrent.futures from concurrent.futures._base import CancelledError from .providers import lib -from openpype.lib import PypeLogger +from openpype.lib import Logger from .utils import SyncStatus, ResumableError -log = PypeLogger().get_logger("SyncServer") - - async def upload(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ @@ -238,6 +235,7 @@ class SyncServerThread(threading.Thread): Stopped when tray is closed. 
""" def __init__(self, module): + self.log = Logger.get_logger(self.__class__.__name__) super(SyncServerThread, self).__init__() self.module = module self.loop = None @@ -249,17 +247,17 @@ class SyncServerThread(threading.Thread): self.is_running = True try: - log.info("Starting Sync Server") + self.log.info("Starting Sync Server") self.loop = asyncio.new_event_loop() # create new loop for thread asyncio.set_event_loop(self.loop) self.loop.set_default_executor(self.executor) asyncio.ensure_future(self.check_shutdown(), loop=self.loop) asyncio.ensure_future(self.sync_loop(), loop=self.loop) - log.info("Sync Server Started") + self.log.info("Sync Server Started") self.loop.run_forever() except Exception: - log.warning( + self.log.warning( "Sync Server service has failed", exc_info=True ) finally: @@ -379,8 +377,9 @@ class SyncServerThread(threading.Thread): )) processed_file_path.add(file_path) - log.debug("Sync tasks count {}". - format(len(task_files_to_process))) + self.log.debug("Sync tasks count {}".format( + len(task_files_to_process) + )) files_created = await asyncio.gather( *task_files_to_process, return_exceptions=True) @@ -399,28 +398,31 @@ class SyncServerThread(threading.Thread): error) duration = time.time() - start_time - log.debug("One loop took {:.2f}s".format(duration)) + self.log.debug("One loop took {:.2f}s".format(duration)) delay = self.module.get_loop_delay(project_name) - log.debug("Waiting for {} seconds to new loop".format(delay)) + self.log.debug( + "Waiting for {} seconds to new loop".format(delay) + ) self.timer = asyncio.create_task(self.run_timer(delay)) await asyncio.gather(self.timer) except ConnectionResetError: - log.warning("ConnectionResetError in sync loop, " - "trying next loop", - exc_info=True) + self.log.warning( + "ConnectionResetError in sync loop, trying next loop", + exc_info=True) except CancelledError: # just stopping server pass except ResumableError: - log.warning("ResumableError in sync loop, " - "trying next loop", - exc_info=True) + self.log.warning( + "ResumableError in sync loop, trying next loop", + exc_info=True) except Exception: self.stop() - log.warning("Unhandled except. in sync loop, stopping server", - exc_info=True) + self.log.warning( + "Unhandled except. 
in sync loop, stopping server", + exc_info=True) def stop(self): """Sets is_running flag to false, 'check_shutdown' shuts server down""" @@ -433,16 +435,17 @@ class SyncServerThread(threading.Thread): while self.is_running: if self.module.long_running_tasks: task = self.module.long_running_tasks.pop() - log.info("starting long running") + self.log.info("starting long running") await self.loop.run_in_executor(None, task["func"]) - log.info("finished long running") + self.log.info("finished long running") self.module.projects_processed.remove(task["project_name"]) await asyncio.sleep(0.5) tasks = [task for task in asyncio.all_tasks() if task is not asyncio.current_task()] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug( + f'Finished awaiting cancelled tasks, results: {results}...') await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop self.executor.shutdown(wait=True) @@ -455,29 +458,32 @@ class SyncServerThread(threading.Thread): def reset_timer(self): """Called when waiting for next loop should be skipped""" - log.debug("Resetting timer") + self.log.debug("Resetting timer") if self.timer: self.timer.cancel() self.timer = None def _working_sites(self, project_name): if self.module.is_project_paused(project_name): - log.debug("Both sites same, skipping") + self.log.debug("Both sites same, skipping") return None, None local_site = self.module.get_active_site(project_name) remote_site = self.module.get_remote_site(project_name) if local_site == remote_site: - log.debug("{}-{} sites same, skipping".format(local_site, - remote_site)) + self.log.debug("{}-{} sites same, skipping".format( + local_site, remote_site)) return None, None configured_sites = _get_configured_sites(self.module, project_name) if not all([local_site in configured_sites, remote_site in configured_sites]): - log.debug("Some of the sites {} - {} is not ".format(local_site, - remote_site) + - "working properly") + self.log.debug( + "Some of the sites {} - {} is not working properly".format( + local_site, remote_site + ) + ) + return None, None return local_site, remote_site diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index c7f9484e55..a478faa9ef 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1,11 +1,16 @@ import os -from bson.objectid import ObjectId +import sys +import time from datetime import datetime import threading import platform import copy +import signal from collections import deque, defaultdict +import click +from bson.objectid import ObjectId + from openpype.client import get_projects from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule @@ -13,7 +18,7 @@ from openpype.settings import ( get_project_settings, get_system_settings, ) -from openpype.lib import PypeLogger, get_local_site_id +from openpype.lib import Logger, get_local_site_id from openpype.pipeline import AvalonMongoDB, Anatomy from openpype.settings.lib import ( get_default_anatomy_settings, @@ -28,7 +33,7 @@ from .utils import time_function, SyncStatus, SiteAlreadyPresentError from openpype.client import get_representations, get_representation_by_id -log = PypeLogger.get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class 
SyncServerModule(OpenPypeModule, ITrayModule): @@ -462,7 +467,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ - log.info("Pausing SyncServer for {}".format(representation_id)) + self.log.info("Pausing SyncServer for {}".format(representation_id)) self._paused_representations.add(representation_id) self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=True) @@ -479,7 +484,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ - log.info("Unpausing SyncServer for {}".format(representation_id)) + self.log.info("Unpausing SyncServer for {}".format(representation_id)) try: self._paused_representations.remove(representation_id) except KeyError: @@ -518,7 +523,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): project_name name """ - log.info("Pausing SyncServer for {}".format(project_name)) + self.log.info("Pausing SyncServer for {}".format(project_name)) self._paused_projects.add(project_name) def unpause_project(self, project_name): @@ -530,7 +535,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): """ - log.info("Unpausing SyncServer for {}".format(project_name)) + self.log.info("Unpausing SyncServer for {}".format(project_name)) try: self._paused_projects.remove(project_name) except KeyError: @@ -558,14 +563,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): It won't check anything, not uploading/downloading... """ - log.info("Pausing SyncServer") + self.log.info("Pausing SyncServer") self._paused = True def unpause_server(self): """ Unpause server """ - log.info("Unpausing SyncServer") + self.log.info("Unpausing SyncServer") self._paused = False def is_paused(self): @@ -876,7 +881,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): # val = val[platform.system().lower()] # except KeyError: # st = "{}'s field value {} should be".format(key, val) # noqa: E501 - # log.error(st + " multiplatform dict") + # self.log.error(st + " multiplatform dict") # # item["namespace"] = item["namespace"].replace('{site}', # site_name) @@ -1148,7 +1153,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if self.enabled: self.sync_server_thread.start() else: - log.info("No presets or active providers. " + + self.log.info("No presets or active providers. 
" + "Synchronization not possible.") def tray_exit(self): @@ -1166,12 +1171,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if not self.is_running: return try: - log.info("Stopping sync server server") + self.log.info("Stopping sync server server") self.sync_server_thread.is_running = False self.sync_server_thread.stop() - log.info("Sync server stopped") + self.log.info("Sync server stopped") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing sync server", exc_info=True ) @@ -1256,7 +1261,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): sync_project_settings[project_name] = proj_settings if not sync_project_settings: - log.info("No enabled and configured projects for sync.") + self.log.info("No enabled and configured projects for sync.") return sync_project_settings def get_sync_project_setting(self, project_name, exclude_locals=False, @@ -1387,7 +1392,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: (list) of dictionaries """ - log.debug("Check representations for : {}".format(project_name)) + self.log.debug("Check representations for : {}".format(project_name)) self.connection.Session["AVALON_PROJECT"] = project_name # retry_cnt - number of attempts to sync specific file before giving up retries_arr = self._get_retries_arr(project_name) @@ -1466,9 +1471,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): }}, {"$sort": {'priority': -1, '_id': 1}}, ] - log.debug("active_site:{} - remote_site:{}".format(active_site, - remote_site)) - log.debug("query: {}".format(aggr)) + self.log.debug("active_site:{} - remote_site:{}".format( + active_site, remote_site + )) + self.log.debug("query: {}".format(aggr)) representations = self.connection.aggregate(aggr) return representations @@ -1503,7 +1509,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if get_local_site_id() not in (local_site, remote_site): # don't do upload/download for studio sites - log.debug("No local site {} - {}".format(local_site, remote_site)) + self.log.debug( + "No local site {} - {}".format(local_site, remote_site) + ) return SyncStatus.DO_NOTHING _, remote_rec = self._get_site_rec(sites, remote_site) or {} @@ -1594,11 +1602,16 @@ class SyncServerModule(OpenPypeModule, ITrayModule): error_str = '' source_file = file.get("path", "") - log.debug("File for {} - {source_file} process {status} {error_str}". 
- format(representation_id, - status=status, - source_file=source_file, - error_str=error_str)) + self.log.debug( + ( + "File for {} - {source_file} process {status} {error_str}" + ).format( + representation_id, + status=status, + source_file=source_file, + error_str=error_str + ) + ) def _get_file_info(self, files, _id): """ @@ -1772,7 +1785,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): break if not found: msg = "Site {} not found".format(site_name) - log.info(msg) + self.log.info(msg) raise ValueError(msg) update = { @@ -1799,7 +1812,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): break if not found: msg = "Site {} not found".format(site_name) - log.info(msg) + self.log.info(msg) raise ValueError(msg) if pause: @@ -1834,7 +1847,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): reset_existing = False files = representation.get("files", []) if not files: - log.debug("No files for {}".format(representation_id)) + self.log.debug("No files for {}".format(representation_id)) return for repre_file in files: @@ -1851,7 +1864,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): reset_existing = True else: msg = "Site {} already present".format(site_name) - log.info(msg) + self.log.info(msg) raise SiteAlreadyPresentError(msg) if reset_existing: @@ -1951,16 +1964,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.widget = SyncServerWindow(self) no_errors = True except ValueError: - log.info("No system setting for sync. Not syncing.", exc_info=True) + self.log.info( + "No system setting for sync. Not syncing.", exc_info=True + ) except KeyError: - log.info(( + self.log.info(( "There are not set presets for SyncServer OR " "Credentials provided are invalid, " "no syncing possible"). format(str(self.sync_project_settings)), exc_info=True) except: - log.error("Uncaught exception durin start of SyncServer", - exc_info=True) + self.log.error( + "Uncaught exception during start of SyncServer", + exc_info=True) self.enabled = no_errors self.widget.show() @@ -2069,3 +2085,46 @@ class SyncServerModule(OpenPypeModule, ITrayModule): settings ('presets') """ return presets[project_name]['sites'][site_name]['root'] + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group(SyncServerModule.name, help="SyncServer module related commands.") +def cli_main(): + pass + + +@cli_main.command() +@click.option( + "-a", + "--active_site", + required=True, + help="Name of active site") +def syncservice(active_site): + """Launch sync server under entered site. + + This should ideally be run by a system service (such as systemd or upstart + on Linux, or as a Windows service). + """ + + from openpype.modules import ModulesManager + + os.environ["OPENPYPE_LOCAL_ID"] = active_site + + def signal_handler(sig, frame): + print("You pressed Ctrl+C.
Process ended.") + sync_server_module.server_exit() + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + manager = ModulesManager() + sync_server_module = manager.modules_by_name["sync_server"] + + sync_server_module.server_init() + sync_server_module.server_start() + + while True: + time.sleep(1.0) diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py index 96fad6a247..9b9768327e 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -2,7 +2,6 @@ from Qt import QtWidgets, QtCore, QtGui from openpype.tools.settings import style -from openpype.lib import PypeLogger from openpype import resources from .widgets import ( @@ -10,8 +9,6 @@ from .widgets import ( SyncRepresentationSummaryWidget ) -log = PypeLogger().get_logger("SyncServer") - class SyncServerWindow(QtWidgets.QDialog): """ diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py index 5ab809a816..988eb40d28 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/sync_server/tray/delegates.py @@ -1,8 +1,7 @@ import os from Qt import QtCore, QtWidgets, QtGui -from openpype.lib import PypeLogger -from . import lib +from openpype.lib import Logger from openpype.tools.utils.constants import ( LOCAL_PROVIDER_ROLE, @@ -16,7 +15,7 @@ from openpype.tools.utils.constants import ( EDIT_ICON_ROLE ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class PriorityDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/sync_server/tray/lib.py index 87344be634..ff93815639 100644 --- a/openpype/modules/sync_server/tray/lib.py +++ b/openpype/modules/sync_server/tray/lib.py @@ -2,11 +2,6 @@ import attr import abc import six -from openpype.lib import PypeLogger - - -log = PypeLogger().get_logger("SyncServer") - STATUS = { 0: 'In Progress', 1: 'Queued', diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index 629c4cbbf1..d63d046508 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -9,8 +9,7 @@ import qtawesome from openpype.tools.utils.delegates import pretty_timestamp -from openpype.lib import PypeLogger -from openpype.api import get_local_site_id +from openpype.lib import Logger, get_local_site_id from openpype.client import get_representation_by_id from . 
import lib @@ -33,7 +32,7 @@ from openpype.tools.utils.constants import ( ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class _SyncRepresentationModel(QtCore.QAbstractTableModel): diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index b4ee447ac4..c40aa98f24 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -9,8 +9,7 @@ import qtawesome from openpype.tools.settings import style -from openpype.api import get_local_site_id -from openpype.lib import PypeLogger +from openpype.lib import Logger, get_local_site_id from openpype.tools.utils.delegates import pretty_timestamp @@ -36,7 +35,7 @@ from openpype.tools.utils.constants import ( TRIES_ROLE ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class SyncProjectListWidget(QtWidgets.QWidget): diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py index 03f362202f..4caa01e9d7 100644 --- a/openpype/modules/sync_server/utils.py +++ b/openpype/modules/sync_server/utils.py @@ -1,6 +1,8 @@ import time -from openpype.api import Logger -log = Logger().get_logger("SyncServer") + +from openpype.lib import Logger + +log = Logger.get_logger("SyncServer") class ResumableError(Exception): diff --git a/openpype/modules/timers_manager/idle_threads.py b/openpype/modules/timers_manager/idle_threads.py index 9ec27e659b..7242761143 100644 --- a/openpype/modules/timers_manager/idle_threads.py +++ b/openpype/modules/timers_manager/idle_threads.py @@ -2,7 +2,7 @@ import time from Qt import QtCore from pynput import mouse, keyboard -from openpype.lib import PypeLogger +from openpype.lib import Logger class IdleItem: @@ -31,7 +31,7 @@ class IdleManager(QtCore.QThread): def __init__(self): super(IdleManager, self).__init__() - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.signal_reset_timer.connect(self._reset_time) self.idle_item = IdleItem() diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index f16cb316c3..4a2e9e6575 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -1,9 +1,7 @@ import json from aiohttp.web_response import Response -from openpype.api import Logger - -log = Logger().get_logger("Event processor") +from openpype.lib import Logger class TimersManagerModuleRestApi: @@ -12,6 +10,7 @@ class TimersManagerModuleRestApi: happens in Workfile app. 
""" def __init__(self, user_module, server_manager): + self._log = None self.module = user_module self.server_manager = server_manager @@ -19,6 +18,12 @@ class TimersManagerModuleRestApi: self.register() + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__ckass__.__name__) + return self._log + def register(self): self.server_manager.add_route( "POST", @@ -47,7 +52,7 @@ class TimersManagerModuleRestApi: "Payload must contain fields 'project_name," " 'asset_name' and 'task_name'" ) - log.error(msg) + self.log.error(msg) return Response(status=400, message=msg) self.module.stop_timers() @@ -73,7 +78,7 @@ class TimersManagerModuleRestApi: "Payload must contain fields 'project_name, 'asset_name'," " 'task_name'" ) - log.warning(message) + self.log.warning(message) return Response(text=message, status=404) time = self.module.get_task_time(project_name, asset_name, task_name) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 93332ace4f..c168e9534d 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -6,7 +6,6 @@ from openpype.client import get_asset_by_name from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, - ILaunchHookPaths, IPluginPaths ) from openpype.lib.events import register_event_callback @@ -79,7 +78,6 @@ class ExampleTimersManagerConnector: class TimersManager( OpenPypeModule, ITrayService, - ILaunchHookPaths, IPluginPaths ): """ Handles about Timers. @@ -185,12 +183,11 @@ class TimersManager( ) def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" - return os.path.join( - TIMER_MODULE_DIR, - "launch_hooks" - ) + return [ + os.path.join(TIMER_MODULE_DIR, "launch_hooks") + ] def get_plugin_paths(self): """Implementation of `IPluginPaths`.""" diff --git a/openpype/modules/webserver/server.py b/openpype/modules/webserver/server.py index 82b681f406..120925a362 100644 --- a/openpype/modules/webserver/server.py +++ b/openpype/modules/webserver/server.py @@ -4,16 +4,16 @@ import asyncio from aiohttp import web -from openpype.lib import PypeLogger +from openpype.lib import Logger from .cors_middleware import cors_middleware -log = PypeLogger.get_logger("WebServer") - class WebServerManager: """Manger that care about web server thread.""" def __init__(self, port=None, host=None): + self._log = None + self.port = port or 8079 self.host = host or "localhost" @@ -33,6 +33,12 @@ class WebServerManager: self.webserver_thread = WebServerThread(self) + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @property def url(self): return "http://{}:{}".format(self.host, self.port) @@ -51,12 +57,12 @@ class WebServerManager: if not self.is_running: return try: - log.debug("Stopping Web server") + self.log.debug("Stopping Web server") self.webserver_thread.is_running = False self.webserver_thread.stop() except Exception: - log.warning( + self.log.warning( "Error has happened during Killing Web server", exc_info=True ) @@ -74,7 +80,10 @@ class WebServerManager: class WebServerThread(threading.Thread): """ Listener for requests in thread.""" + def __init__(self, manager): + self._log = None + super(WebServerThread, self).__init__() self.is_running = False @@ -84,6 +93,12 @@ class WebServerThread(threading.Thread): self.site = None 
self.tasks = [] + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @property def port(self): return self.manager.port @@ -96,13 +111,13 @@ class WebServerThread(threading.Thread): self.is_running = True try: - log.info("Starting WebServer server") + self.log.info("Starting WebServer server") self.loop = asyncio.new_event_loop() # create new loop for thread asyncio.set_event_loop(self.loop) self.loop.run_until_complete(self.start_server()) - log.debug( + self.log.debug( "Running Web server on URL: \"localhost:{}\"".format(self.port) ) @@ -110,7 +125,7 @@ class WebServerThread(threading.Thread): self.loop.run_forever() except Exception: - log.warning( + self.log.warning( "Web Server service has failed", exc_info=True ) finally: @@ -118,7 +133,7 @@ class WebServerThread(threading.Thread): self.is_running = False self.manager.thread_stopped() - log.info("Web server stopped") + self.log.info("Web server stopped") async def start_server(self): """ Starts runner and TCPsite """ @@ -138,17 +153,17 @@ class WebServerThread(threading.Thread): while self.is_running: while self.tasks: task = self.tasks.pop(0) - log.debug("waiting for task {}".format(task)) + self.log.debug("waiting for task {}".format(task)) await task - log.debug("returned value {}".format(task.result)) + self.log.debug("returned value {}".format(task.result)) await asyncio.sleep(0.5) - log.debug("Starting shutdown") + self.log.debug("Starting shutdown") await self.site.stop() - log.debug("Site stopped") + self.log.debug("Site stopped") await self.runner.cleanup() - log.debug("Runner stopped") + self.log.debug("Runner stopped") tasks = [ task for task in asyncio.all_tasks() @@ -156,7 +171,9 @@ class WebServerThread(threading.Thread): ] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug( + f'Finished awaiting cancelled tasks, results: {results}...' + ) await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop await asyncio.sleep(0.07) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 686bd27bfd..16861abd29 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -53,9 +53,12 @@ class WebServerModule(OpenPypeModule, ITrayService): try: module.webserver_initialization(self.server_manager) except Exception: - self.log.warning(( - "Failed to connect module \"{}\" to webserver." - ).format(module.name)) + self.log.warning( + ( + "Failed to connect module \"{}\" to webserver." 
+ ).format(module.name), + exc_info=True + ) def tray_init(self): self.create_server_manager() diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 08db4749b3..908dc2b187 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -6,17 +6,26 @@ import collections import numbers import six +import time -from openpype.settings.lib import get_anatomy_settings +from openpype.settings.lib import ( + get_project_settings, + get_local_settings, +) +from openpype.settings.constants import ( + DEFAULT_PROJECT_KEY +) + +from openpype.client import get_project from openpype.lib.path_templates import ( TemplateUnsolved, TemplateResult, TemplatesDict, FormatObject, ) -from openpype.lib.log import PypeLogger +from openpype.lib.log import Logger -log = PypeLogger.get_logger(__name__) +log = Logger.get_logger(__name__) class ProjectNotSet(Exception): @@ -39,34 +48,23 @@ class RootCombinationError(Exception): super(RootCombinationError, self).__init__(msg) -class Anatomy: +class BaseAnatomy(object): """Anatomy module helps to keep project settings. Wraps key project specifications, AnatomyTemplates and Roots. - - Args: - project_name (str): Project name to look on overrides. """ - root_key_regex = re.compile(r"{(root?[^}]+)}") root_name_regex = re.compile(r"root\[([^]]+)\]") - def __init__(self, project_name=None, site_name=None): - if not project_name: - project_name = os.environ.get("AVALON_PROJECT") - - if not project_name: - raise ProjectNotSet(( - "Implementation bug: Project name is not set. Anatomy requires" - " to load data for specific project." - )) - + def __init__(self, project_doc, local_settings, site_name): + project_name = project_doc["name"] self.project_name = project_name - self._data = self._prepare_anatomy_data( - get_anatomy_settings(project_name, site_name) - ) self._site_name = site_name + + self._data = self._prepare_anatomy_data( + project_doc, local_settings, site_name + ) self._templates_obj = AnatomyTemplates(self) self._roots_obj = Roots(self) @@ -87,12 +85,14 @@ class Anatomy: def items(self): return copy.deepcopy(self._data).items() - @staticmethod - def _prepare_anatomy_data(anatomy_data): + def _prepare_anatomy_data(self, project_doc, local_settings, site_name): """Prepare anatomy data for further processing. Method added to replace `{task}` with `{task[name]}` in templates. 
""" + project_name = project_doc["name"] + anatomy_data = self._project_doc_to_anatomy_data(project_doc) + templates_data = anatomy_data.get("templates") if templates_data: # Replace `{task}` with `{task[name]}` in templates @@ -103,23 +103,13 @@ class Anatomy: if not isinstance(item, dict): continue - for key in tuple(item.keys()): - value = item[key] - if isinstance(value, dict): - value_queue.append(value) + self._apply_local_settings_on_anatomy_data(anatomy_data, + local_settings, + project_name, + site_name) - elif isinstance(value, six.string_types): - item[key] = value.replace("{task}", "{task[name]}") return anatomy_data - def reset(self): - """Reset values of cached data in templates and roots objects.""" - self._data = self._prepare_anatomy_data( - get_anatomy_settings(self.project_name, self._site_name) - ) - self.templates_obj.reset() - self.roots_obj.reset() - @property def templates(self): """Wrap property `templates` of Anatomy's AnatomyTemplates instance.""" @@ -338,6 +328,161 @@ class Anatomy: data = self.root_environmets_fill_data(template) return rootless_path.format(**data) + def _project_doc_to_anatomy_data(self, project_doc): + """Convert project document to anatomy data. + + Probably should fill missing keys and values. + """ + + output = copy.deepcopy(project_doc["config"]) + output["attributes"] = copy.deepcopy(project_doc["data"]) + + return output + + def _apply_local_settings_on_anatomy_data( + self, anatomy_data, local_settings, project_name, site_name + ): + """Apply local settings on anatomy data. + + ATM local settings can modify project roots. Project name is required + as local settings have data stored data by project's name. + + Local settings override root values in this order: + 1.) Check if local settings contain overrides for default project and + apply it's values on roots if there are any. + 2.) If passed `project_name` is not None then check project specific + overrides in local settings for the project and apply it's value on + roots if there are any. + + NOTE: Root values of default project from local settings are always + applied if are set. + + Args: + anatomy_data (dict): Data for anatomy. + local_settings (dict): Data of local settings. + project_name (str): Name of project for which anatomy data are. 
+ """ + if not local_settings: + return + + local_project_settings = local_settings.get("projects") or {} + + # Check for roots existence in local settings first + roots_project_locals = ( + local_project_settings + .get(project_name, {}) + ) + roots_default_locals = ( + local_project_settings + .get(DEFAULT_PROJECT_KEY, {}) + ) + + # Skip rest of processing if roots are not set + if not roots_project_locals and not roots_default_locals: + return + + # Combine roots from local settings + roots_locals = roots_default_locals.get(site_name) or {} + roots_locals.update(roots_project_locals.get(site_name) or {}) + # Skip processing if roots for current active site are not available in + # local settings + if not roots_locals: + return + + current_platform = platform.system().lower() + + root_data = anatomy_data["roots"] + for root_name, path in roots_locals.items(): + if root_name not in root_data: + continue + anatomy_data["roots"][root_name][current_platform] = ( + path + ) + + +class Anatomy(BaseAnatomy): + _project_cache = {} + _site_cache = {} + + def __init__(self, project_name=None, site_name=None): + if not project_name: + project_name = os.environ.get("AVALON_PROJECT") + + if not project_name: + raise ProjectNotSet(( + "Implementation bug: Project name is not set. Anatomy requires" + " to load data for specific project." + )) + + project_doc = self.get_project_doc_from_cache(project_name) + local_settings = get_local_settings() + if not site_name: + site_name = self.get_site_name_from_cache( + project_name, local_settings + ) + + super(Anatomy, self).__init__( + project_doc, + local_settings, + site_name + ) + + @classmethod + def get_project_doc_from_cache(cls, project_name): + project_cache = cls._project_cache.get(project_name) + if project_cache is not None: + if time.time() - project_cache["start"] > 10: + cls._project_cache.pop(project_name) + project_cache = None + + if project_cache is None: + project_cache = { + "project_doc": get_project(project_name), + "start": time.time() + } + cls._project_cache[project_name] = project_cache + + return copy.deepcopy( + cls._project_cache[project_name]["project_doc"] + ) + + @classmethod + def get_site_name_from_cache(cls, project_name, local_settings): + site_cache = cls._site_cache.get(project_name) + if site_cache is not None: + if time.time() - site_cache["start"] > 10: + cls._site_cache.pop(project_name) + site_cache = None + + if site_cache: + return site_cache["site_name"] + + local_project_settings = local_settings.get("projects") + if not local_project_settings: + return + + project_locals = local_project_settings.get(project_name) or {} + default_locals = local_project_settings.get(DEFAULT_PROJECT_KEY) or {} + active_site = ( + project_locals.get("active_site") + or default_locals.get("active_site") + ) + if not active_site: + project_settings = get_project_settings(project_name) + active_site = ( + project_settings + ["global"] + ["sync_server"] + ["config"] + ["active_site"] + ) + + cls._site_cache[project_name] = { + "site_name": active_site, + "start": time.time() + } + return active_site + class AnatomyTemplateUnsolved(TemplateUnsolved): """Exception for unsolved template when strict is set to True.""" diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 5f763cd249..af0ee79f47 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -16,6 +16,7 @@ from openpype.client import ( get_asset_by_name, version_is_latest, ) +from openpype.lib.events import 
emit_event from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings @@ -29,7 +30,7 @@ from .workfile import ( from . import ( legacy_io, register_loader_plugin_path, - register_inventory_action, + register_inventory_action_path, register_creator_plugin_path, deregister_loader_plugin_path, ) @@ -196,7 +197,7 @@ def install_openpype_plugins(project_name=None, host_name=None): pyblish.api.register_plugin_path(path) register_loader_plugin_path(path) register_creator_plugin_path(path) - register_inventory_action(path) + register_inventory_action_path(path) def uninstall_host(): @@ -445,3 +446,103 @@ def get_custom_workfile_template_from_session( session["AVALON_APP"], project_settings=project_settings ) + + +def compute_session_changes( + session, asset_doc, task_name, template_key=None +): + """Compute the changes for a session object on task under asset. + + Function does not change the session object, only returns changes. + + Args: + session (Dict[str, str]): The initial session to compute changes to. + This is required for computing the full Work Directory, as that + also depends on the values that haven't changed. + asset_doc (Dict[str, Any]): Asset document to switch to. + task_name (str): Name of task to switch to. + template_key (Union[str, None]): Prepare workfile template key in + anatomy templates. + + Returns: + Dict[str, str]: Changes in the Session dictionary. + """ + + changes = {} + + # Get asset document and asset + if not asset_doc: + task_name = None + asset_name = None + else: + asset_name = asset_doc["name"] + + # Detect any changes compared session + mapping = { + "AVALON_ASSET": asset_name, + "AVALON_TASK": task_name, + } + changes = { + key: value + for key, value in mapping.items() + if value != session.get(key) + } + if not changes: + return changes + + # Compute work directory (with the temporary changed session so far) + changed_session = session.copy() + changed_session.update(changes) + + workdir = None + if asset_doc: + workdir = get_workdir_from_session( + changed_session, template_key + ) + + changes["AVALON_WORKDIR"] = workdir + + return changes + + +def change_current_context(asset_doc, task_name, template_key=None): + """Update active Session to a new task work area. + + This updates the live Session to a different task under asset. + + Args: + asset_doc (Dict[str, Any]): The asset document to set. + task_name (str): The task to set under asset. + template_key (Union[str, None]): Prepared template key to be used for + workfile template in Anatomy. + + Returns: + Dict[str, str]: The changed key, values in the current Session. + """ + + changes = compute_session_changes( + legacy_io.Session, + asset_doc, + task_name, + template_key=template_key + ) + + # Update the Session and environments. Pop from environments all keys with + # value set to None. 
+ for key, value in changes.items(): + legacy_io.Session[key] = value + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + data = changes.copy() + # Convert env keys to human readable keys + data["project_name"] = legacy_io.Session["AVALON_PROJECT"] + data["asset_name"] = legacy_io.Session["AVALON_ASSET"] + data["task_name"] = legacy_io.Session["AVALON_TASK"] + + # Emit session change + emit_event("taskChanged", data) + + return changes diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index bd196ccfd1..4b91951a08 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -1,6 +1,13 @@ from .constants import ( - SUBSET_NAME_ALLOWED_SYMBOLS + SUBSET_NAME_ALLOWED_SYMBOLS, + DEFAULT_SUBSET_TEMPLATE, ) + +from .subset_name import ( + TaskNotSetError, + get_subset_name, +) + from .creator_plugins import ( CreatorError, @@ -9,8 +16,10 @@ from .creator_plugins import ( AutoCreator, HiddenCreator, - discover_creator_plugins, discover_legacy_creator_plugins, + get_legacy_creator_by_name, + + discover_creator_plugins, register_creator_plugin, deregister_creator_plugin, register_creator_plugin_path, @@ -30,6 +39,10 @@ from .legacy_create import ( __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", + "DEFAULT_SUBSET_TEMPLATE", + + "TaskNotSetError", + "get_subset_name", "CreatorError", @@ -38,8 +51,10 @@ __all__ = ( "AutoCreator", "HiddenCreator", - "discover_creator_plugins", "discover_legacy_creator_plugins", + "get_legacy_creator_by_name", + + "discover_creator_plugins", "register_creator_plugin", "deregister_creator_plugin", "register_creator_plugin_path", diff --git a/openpype/pipeline/create/constants.py b/openpype/pipeline/create/constants.py index bfbbccfd12..3af9651947 100644 --- a/openpype/pipeline/create/constants.py +++ b/openpype/pipeline/create/constants.py @@ -1,6 +1,8 @@ SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_." 
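# Aside: a hedged usage sketch of the `compute_session_changes` and
# `change_current_context` helpers added above; the project, asset and task
# names here are hypothetical.

from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.context_tools import compute_session_changes

# Ask what switching to task "animation" under asset "sh010" would change,
# without mutating the live Session.
asset_doc = get_asset_by_name("my_project", "sh010")
changes = compute_session_changes(legacy_io.Session, asset_doc, "animation")
# An empty dict means the Session already points at that context; otherwise
# keys such as AVALON_ASSET, AVALON_TASK and AVALON_WORKDIR are present and
# can be applied with change_current_context(asset_doc, "animation").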
+DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", + "DEFAULT_SUBSET_TEMPLATE", ) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index eaaed39357..3e09ff287d 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -7,7 +7,11 @@ from uuid import uuid4 from contextlib import contextmanager from openpype.client import get_assets -from openpype.host import INewPublisher +from openpype.settings import ( + get_system_settings, + get_project_settings +) +from openpype.host import IPublishHost from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( AvalonMongoDB, @@ -20,14 +24,14 @@ from .creator_plugins import ( discover_creator_plugins, ) -from openpype.api import ( - get_system_settings, - get_project_settings -) - UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) +class UnavailableSharedData(Exception): + """Shared data are not available at the moment they are accessed.""" + pass + + class ImmutableKeyError(TypeError): """Accessed key is immutable so does not allow changes or removements.""" @@ -167,7 +171,10 @@ class AttributeValues: return self._data.pop(key, default) def reset_values(self): - self._data = [] + self._data = {} + + def mark_as_stored(self): + self._origin_data = copy.deepcopy(self._data) @property def attr_defs(self): @@ -198,6 +205,16 @@ class AttributeValues: def changes(self): return self.calculate_changes(self._data, self._origin_data) + def apply_changes(self, changes): + for key, item in changes.items(): + old_value, new_value = item + if new_value is None: + if key in self: + self.pop(key) + + elif self.get(key) != new_value: + self[key] = new_value + class CreatorAttributeValues(AttributeValues): """Creator specific attribute values of an instance. @@ -304,6 +321,9 @@ class PublishAttributes: for name in self._plugin_names_order: yield name + def mark_as_stored(self): + self._origin_data = copy.deepcopy(self._data) + def data_to_store(self): """Convert attribute values to "data to store".""" @@ -328,6 +348,21 @@ class PublishAttributes: changes[key] = (value, None) return changes + def apply_changes(self, changes): + for key, item in changes.items(): + if isinstance(item, dict): + self._data[key].apply_changes(item) + continue + + old_value, new_value = item + if new_value is not None: + raise ValueError( + "Unexpected type \"{}\" expected None".format( + str(type(new_value)) + ) + ) + self.pop(key) + def set_publish_plugins(self, attr_plugins): """Set publish plugins attribute definitions.""" @@ -402,8 +437,12 @@ class CreatedInstance: self.creator = creator # Instance members may have actions on them + # TODO implement members logic self._members = [] + # Data that can be used for lifetime of object + self._transient_data = {} + # Create a copy of passed data to avoid changing them on the fly data = copy.deepcopy(data or {}) # Store original value of passed data @@ -596,6 +635,26 @@ class CreatedInstance: return self + @property + def transient_data(self): + """Data stored for lifetime of instance object. + + These data are not stored to scene and will be lost on object + deletion. + + Can be used to store objects. In some host implementations it is not + possible to reference an object in the scene by a unique identifier + (e.g. a node in Fusion). In that case it is handy to store the object + here. Use it this way only if instance data are stored on the + node itself.
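+ + Example (hypothetical host integration): + + instance.transient_data["node"] = scene_node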
+ + Returns: + Dict[str, Any]: Dictionary object where you can store data related + to instance for lifetime of instance object. + """ + + return self._transient_data + def changes(self): """Calculate and return changes.""" @@ -623,6 +682,25 @@ changes[key] = (old_value, None) return changes + def mark_as_stored(self): + """Should be called when instance data are stored. + + Origin data are replaced by current data so changes are cleared. + """ + + orig_keys = set(self._orig_data.keys()) + for key, value in self._data.items(): + orig_keys.discard(key) + if key in ("creator_attributes", "publish_attributes"): + continue + self._orig_data[key] = copy.deepcopy(value) + + for key in orig_keys: + self._orig_data.pop(key) + + self.creator_attributes.mark_as_stored() + self.publish_attributes.mark_as_stored() + @property def creator_attributes(self): return self._data["creator_attributes"] @@ -636,6 +714,18 @@ return self._data["publish_attributes"] def data_to_store(self): + """Collect data that contain JSON parsable types. + + It is possible to recreate the instance using these data. + + Todo: + We probably don't need OrderedDict. When data are loaded they + are not ordered anymore. + + Returns: + OrderedDict: Ordered dictionary with instance data. + """ + output = collections.OrderedDict() for key, value in self._data.items(): if key in ("creator_attributes", "publish_attributes"): @@ -670,6 +760,97 @@ if member not in self._members: self._members.append(member) + def serialize_for_remote(self): + return { + "data": self.data_to_store(), + "orig_data": copy.deepcopy(self._orig_data) + } + + @classmethod + def deserialize_on_remote(cls, serialized_data, creator_items): + """Convert instance data to CreatedInstance. + + This creates a fake instance in a remote process, e.g. in a UI + process. The creator is not a full creator and should not be used for + calling methods when instance is created from this method (depends on + the implementation). + + Args: + serialized_data (Dict[str, Any]): Serialized data for remote + recreating. Should contain 'data' and 'orig_data'. + creator_items (Dict[str, Any]): Mapping of creator identifier and + objects that behave like a creator for most of attribute + access. + """ + + instance_data = copy.deepcopy(serialized_data["data"]) + creator_identifier = instance_data["creator_identifier"] + creator_item = creator_items[creator_identifier] + + family = instance_data.get("family", None) + if family is None: + family = creator_item.family + subset_name = instance_data.get("subset", None) + + obj = cls( + family, subset_name, instance_data, creator_item, new=False + ) + obj._orig_data = serialized_data["orig_data"] + + return obj + + def remote_changes(self): + """Prepare serializable changes on remote side. + + Returns: + Dict[str, Any]: Prepared changes that can be sent to client side. + """ + + return { + "changes": self.changes(), + "asset_is_valid": self._asset_is_valid, + "task_is_valid": self._task_is_valid, + } + + def update_from_remote(self, remote_changes): + """Apply changes from remote side on client side. + + Args: + remote_changes (Dict[str, Any]): Changes created on remote side.
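+ Expected to contain 'changes', 'asset_is_valid' and + 'task_is_valid' keys, as produced by 'remote_changes'.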
+ """ + + self._asset_is_valid = remote_changes["asset_is_valid"] + self._task_is_valid = remote_changes["task_is_valid"] + + changes = remote_changes["changes"] + creator_attributes = changes.pop("creator_attributes", None) or {} + publish_attributes = changes.pop("publish_attributes", None) or {} + if changes: + self.apply_changes(changes) + + if creator_attributes: + self.creator_attributes.apply_changes(creator_attributes) + + if publish_attributes: + self.publish_attributes.apply_changes(publish_attributes) + + def apply_changes(self, changes): + """Apply changes created via 'changes'. + + Args: + Dict[str, Tuple[Any, Any]]: Instance changes to apply. Same values + are kept untouched. + """ + + for key, item in changes.items(): + old_value, new_value = item + if new_value is None: + if key in self: + self.pop(key) + else: + current_value = self.get(key) + if current_value != new_value: + self[key] = new_value + class CreateContext: """Context of instance creation. @@ -749,6 +930,9 @@ class CreateContext: self._bulk_counter = 0 self._bulk_instances_to_process = [] + # Shared data across creators during collection phase + self._collection_shared_data = None + # Trigger reset if was enabled if reset: self.reset(discover_publish_plugins) @@ -757,6 +941,10 @@ class CreateContext: def instances(self): return self._instances_by_id.values() + @property + def instances_by_id(self): + return self._instances_by_id + @property def publish_attributes(self): """Access to global publish attributes.""" @@ -771,7 +959,7 @@ class CreateContext: """ missing = set( - INewPublisher.get_missing_publish_methods(host) + IPublishHost.get_missing_publish_methods(host) ) return missing @@ -800,6 +988,9 @@ class CreateContext: All changes will be lost if were not saved explicitely. """ + + self.reset_preparation() + self.reset_avalon_context() self.reset_plugins(discover_publish_plugins) self.reset_context_data() @@ -808,6 +999,20 @@ class CreateContext: self.reset_instances() self.execute_autocreators() + self.reset_finalization() + + def reset_preparation(self): + """Prepare attributes that must be prepared/cleaned before reset.""" + + # Give ability to store shared data for collection phase + self._collection_shared_data = {} + + def reset_finalization(self): + """Cleanup of attributes after reset.""" + + # Stop access to collection shared data + self._collection_shared_data = None + def reset_avalon_context(self): """Give ability to reset avalon context. @@ -916,7 +1121,8 @@ class CreateContext: and creator_class.host_name != self.host_name ): self.log.info(( - "Creator's host name is not supported for current host {}" + "Creator's host name \"{}\"" + " is not supported for current host \"{}\"" ).format(creator_class.host_name, self.host_name)) continue @@ -1191,3 +1397,20 @@ class CreateContext: if not plugin.__instanceEnabled__: plugins.append(plugin) return plugins + + @property + def collection_shared_data(self): + """Access to shared data that can be used during creator's collection. + + Retruns: + Dict[str, Any]: Shared data. + + Raises: + UnavailableSharedData: When called out of collection phase. 
+ """ + + if self._collection_shared_data is None: + raise UnavailableSharedData( + "Accessed Collection shared data out of collection phase" + ) + return self._collection_shared_data diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 9a5d559774..97ee94c449 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -6,10 +6,11 @@ from abc import ( abstractmethod, abstractproperty ) + import six from openpype.settings import get_system_settings, get_project_settings -from openpype.lib import get_subset_name_with_asset_doc +from openpype.lib import Logger from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -18,6 +19,7 @@ from openpype.pipeline.plugin_discover import ( deregister_plugin_path ) +from .subset_name import get_subset_name from .legacy_create import LegacyCreator @@ -75,11 +77,19 @@ class BaseCreator: ): # Reference to CreateContext self.create_context = create_context + self.project_settings = project_settings # Creator is running in headless mode (without UI elemets) # - we may use UI inside processing this attribute should be checked self.headless = headless + self.apply_settings(project_settings, system_settings) + + def apply_settings(self, project_settings, system_settings): + """Method called on initialization of plugin to apply settings.""" + + pass + @property def identifier(self): """Identifier of creator (must be unique). @@ -135,8 +145,6 @@ class BaseCreator: """ if self._log is None: - from openpype.api import Logger - self._log = Logger.get_logger(self.__class__.__name__) return self._log @@ -239,7 +247,7 @@ class BaseCreator: return self.icon def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name + self, variant, task_name, asset_doc, project_name, host_name, instance ): """Dynamic data for subset name filling. @@ -250,7 +258,13 @@ class BaseCreator: return {} def get_subset_name( - self, variant, task_name, asset_doc, project_name, host_name=None + self, + variant, + task_name, + asset_doc, + project_name, + host_name=None, + instance=None ): """Return subset name for passed context. @@ -264,26 +278,32 @@ class BaseCreator: Asset document is not used yet but is required if would like to use task type in subset templates. + Method is also called on subset name update. In that case origin + instance is passed in. + Args: variant(str): Subset name variant. In most of cases user input. task_name(str): For which task subset is created. asset_doc(dict): Asset document for which subset is created. project_name(str): Project name. host_name(str): Which host creates subset. + instance(str|None): Object of 'CreatedInstance' for which is + subset name updated. Passed only on subset name update. """ dynamic_data = self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name + variant, task_name, asset_doc, project_name, host_name, instance ) - return get_subset_name_with_asset_doc( + return get_subset_name( self.family, variant, task_name, asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=self.project_settings ) def get_instance_attr_defs(self): @@ -304,6 +324,19 @@ class BaseCreator: return self.instance_attr_defs + @property + def collection_shared_data(self): + """Access to shared data that can be used during creator's collection. + + Retruns: + Dict[str, Any]: Shared data. 
class Creator(BaseCreator): """Creator that has more information for artist to show in UI. @@ -458,6 +491,34 @@ def discover_legacy_creator_plugins(): return plugins +def get_legacy_creator_by_name(creator_name, case_sensitive=False): + """Find creator plugin by name. + + Args: + creator_name (str): Name of creator class that should be returned. + case_sensitive (bool): Match of creator plugin name is case sensitive. + Set to `False` by default. + + Returns: + Creator: Return first matching plugin or `None`. + """ + + # Lower input creator name if is not case sensitive + if not case_sensitive: + creator_name = creator_name.lower() + + for creator_plugin in discover_legacy_creator_plugins(): + _creator_name = creator_plugin.__name__ + + # Lower creator plugin name if is not case sensitive + if not case_sensitive: + _creator_name = _creator_name.lower() + + if _creator_name == creator_name: + return creator_plugin + return None + + def register_creator_plugin(plugin): if issubclass(plugin, BaseCreator): register_plugin(BaseCreator, plugin) diff --git a/openpype/pipeline/create/legacy_create.py b/openpype/pipeline/create/legacy_create.py index 2764b3cb95..82e5de7a8f 100644 --- a/openpype/pipeline/create/legacy_create.py +++ b/openpype/pipeline/create/legacy_create.py @@ -9,7 +9,9 @@ import os import logging import collections -from openpype.lib import get_subset_name +from openpype.client import get_asset_by_id + +from .subset_name import get_subset_name class LegacyCreator(object): @@ -147,11 +149,15 @@ class LegacyCreator(object): variant, task_name, asset_id, project_name, host_name ) + asset_doc = get_asset_by_id( + project_name, asset_id, fields=["data.tasks"] + ) + return get_subset_name( cls.family, variant, task_name, - asset_id, + asset_doc, project_name, host_name, dynamic_data=dynamic_data )
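A short usage sketch for the lookup above; the "CreateRender" plugin name is only an assumed example:

```python
# Case-insensitive match against discovered legacy creator class names
creator_cls = get_legacy_creator_by_name("createrender")
if creator_cls is not None:
    print(creator_cls.__name__)  # -> e.g. "CreateRender"
```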
diff --git a/openpype/pipeline/create/subset_name.py b/openpype/pipeline/create/subset_name.py new file mode 100644 index 0000000000..f508263708 --- /dev/null +++ b/openpype/pipeline/create/subset_name.py @@ -0,0 +1,109 @@ +import os + +from openpype.settings import get_project_settings +from openpype.lib import filter_profiles, prepare_template_data +from openpype.pipeline import legacy_io + +from .constants import DEFAULT_SUBSET_TEMPLATE + + +class TaskNotSetError(KeyError): + def __init__(self, msg=None): + if not msg: + msg = "Creator's subset name template requires task name." + super(TaskNotSetError, self).__init__(msg) + + +def get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name=None, + host_name=None, + default_template=None, + dynamic_data=None, + project_settings=None +): + """Calculate subset name based on passed context and OpenPype settings. + + Subset name templates are defined in `project_settings/global/tools/creator + /subset_name_profiles` where profiles with host name, family, task name + and task type filters are defined. If context does not match any profile + then `DEFAULT_SUBSET_TEMPLATE` is used as default template. + + That is the main reason why so many arguments are required to calculate + the subset name. + + Args: + family (str): Instance family. + variant (str): In most cases it is user input during creation. + task_name (str): Task name under which the instance is created. + asset_doc (dict): Queried asset document with its tasks in data. + Used to get task type. + project_name (str): Name of project in which the instance is created. + Important for project settings that are loaded. + host_name (str): One of filtering criteria for template profile + filters. + default_template (str): Default template used if no profile matches + the passed context. Constant 'DEFAULT_SUBSET_TEMPLATE' is used if + not passed. + dynamic_data (dict): Dynamic data specific for a creator which creates + the instance. + """ + + if not family: + return "" + + if not host_name: + host_name = os.environ["AVALON_APP"] + + # Use only last part of class family value split by dot (`.`) + family = family.rsplit(".", 1)[-1] + + if project_name is None: + project_name = legacy_io.Session["AVALON_PROJECT"] + + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + # Get settings + if not project_settings: + project_settings = get_project_settings(project_name) + tools_settings = project_settings["global"]["tools"] + profiles = tools_settings["creator"]["subset_name_profiles"] + filtering_criteria = { + "families": family, + "hosts": host_name, + "tasks": task_name, + "task_types": task_type + } + + matching_profile = filter_profiles(profiles, filtering_criteria) + template = None + if matching_profile: + template = matching_profile["template"] + + # Make sure template is set (matching may have empty string) + if not template: + template = default_template or DEFAULT_SUBSET_TEMPLATE + + # Simple check of task name existence for template with {task} in + # - missing task should be possible only in Standalone publisher + if not task_name and "{task" in template.lower(): + raise TaskNotSetError() + + fill_pairs = { + "variant": variant, + "family": family, + "task": task_name + } + if dynamic_data: + # Dynamic data may override default values + for key, value in dynamic_data.items(): + fill_pairs[key] = value + + return template.format(**prepare_template_data(fill_pairs))
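A hedged example of the template filling; the profile template and `asset_doc` content are assumptions, and the case-variant behavior depends on `prepare_template_data`:

```python
# Assuming the matched profile (or default) resolves to "{family}{Variant}"
# and 'asset_doc' carries the task under 'data.tasks':
name = get_subset_name(
    "render", "Main", "compositing", asset_doc,
    project_name="demo_project"
)
# If 'prepare_template_data' expands case variants as its name suggests,
# {Variant} is filled with "Main" and the result is "renderMain".
```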
+ """ + + format_dict = {} + if not location_path: + return format_dict + + location_path = location_path.replace("\\", "/") + root_names = anatomy.root_names_from_templates( + anatomy.templates["delivery"] + ) + format_dict["root"] = {} + for name in root_names: + format_dict["root"][name] = location_path + return format_dict + + +def check_destination_path( + repre_id, + anatomy, + anatomy_data, + datetime_data, + template_name +): + """ Try to create destination path based on 'template_name'. + + In the case that path cannot be filled, template contains unmatched + keys, provide error message to filter out repre later. + + Args: + repre_id (str): Representation id. + anatomy (Anatomy): Project anatomy. + anatomy_data (dict): Template data to fill anatomy templates. + datetime_data (dict): Values with actual date. + template_name (str): Name of template which should be used from anatomy + templates. + Returns: + Dict[str, List[str]]: Report of happened errors. Key is message title + value is detailed information. + """ + + anatomy_data.update(datetime_data) + anatomy_filled = anatomy.format_all(anatomy_data) + dest_path = anatomy_filled["delivery"][template_name] + report_items = collections.defaultdict(list) + + if not dest_path.solved: + msg = ( + "Missing keys in Representation's context" + " for anatomy template \"{}\"." + ).format(template_name) + + sub_msg = ( + "Representation: {}
" + ).format(repre_id) + + if dest_path.missing_keys: + keys = ", ".join(dest_path.missing_keys) + sub_msg += ( + "- Missing keys: \"{}\"
" + ).format(keys) + + if dest_path.invalid_types: + items = [] + for key, value in dest_path.invalid_types.items(): + items.append("\"{}\" {}".format(key, str(value))) + + keys = ", ".join(items) + sub_msg += ( + "- Invalid value DataType: \"{}\"
+ ).format(keys) + + report_items[msg].append(sub_msg) + + return report_items + + +def deliver_single_file( + src_path, + repre, + anatomy, + template_name, + anatomy_data, + format_dict, + report_items, + log +): + """Copy single file to calculated path based on template. + + Args: + src_path(str): path of source representation file + repre (dict): full representation; used only in deliver_sequence, + here only to share the same signature + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (logging.Logger): for log printing + + Returns: + (collections.defaultdict, int) + """ + + # Make sure path is valid for all platforms + src_path = os.path.normpath(src_path.replace("\\", "/")) + + if not os.path.exists(src_path): + msg = "{} doesn't exist for {}".format(src_path, repre["_id"]) + report_items["Source file was not found"].append(msg) + return report_items, 0 + + anatomy_filled = anatomy.format(anatomy_data) + if format_dict: + template_result = anatomy_filled["delivery"][template_name] + delivery_path = template_result.rootless.format(**format_dict) + else: + delivery_path = anatomy_filled["delivery"][template_name] + + # Backwards compatibility when extension contained `.` + delivery_path = delivery_path.replace("..", ".") + # Make sure path is valid for all platforms + delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) + + delivery_folder = os.path.dirname(delivery_path) + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + log.debug("Copying single: {} -> {}".format(src_path, delivery_path)) + _copy_file(src_path, delivery_path) + + return report_items, 1
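A usage sketch for the function above; `repre_doc`, `anatomy`, `anatomy_data`, `format_dict` and the template name are placeholders:

```python
import collections

report_items = collections.defaultdict(list)
report_items, uploaded = deliver_single_file(
    src_path, repre_doc, anatomy, "delivery_single",
    anatomy_data, format_dict, report_items, log
)
# 'uploaded' is 1 on success, 0 when the source file was not found.
```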
+ + +def deliver_sequence( + src_path, + repre, + anatomy, + template_name, + anatomy_data, + format_dict, + report_items, + log +): + """For Pype 2 (mainly; works in 3 too) where representation might not + contain files. + + Lists and copies physical files instead of relying on 'files' on the + representation, as those a) might not be present, b) might not be + reliable. + + TODO Should be refactored when files are sufficient to drive all + representations. + + Args: + src_path(str): path of source representation file + repre (dict): full representation + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (logging.Logger): for log printing + + Returns: + (collections.defaultdict, int) + """ + + src_path = os.path.normpath(src_path.replace("\\", "/")) + + def hash_path_exist(myPath): + res = myPath.replace('#', '*') + glob_search_results = glob.glob(res) + if len(glob_search_results) > 0: + return True + return False + + if not hash_path_exist(src_path): + msg = "{} doesn't exist for {}".format( + src_path, repre["_id"]) + report_items["Source file was not found"].append(msg) + return report_items, 0 + + delivery_templates = anatomy.templates.get("delivery") or {} + delivery_template = delivery_templates.get(template_name) + if delivery_template is None: + msg = ( + "Delivery template \"{}\" in anatomy of project \"{}\"" + " was not found" + ).format(template_name, anatomy.project_name) + report_items[""].append(msg) + return report_items, 0 + + # Check if 'frame' key is available in template which is required + # for sequence delivery + if "{frame" not in delivery_template: + msg = ( + "Delivery template \"{}\" in anatomy of project \"{}\"" + " does not contain '{{frame}}' key to fill. Delivery of sequence" + " can't be processed." + ).format(template_name, anatomy.project_name) + report_items[""].append(msg) + return report_items, 0 + + dir_path, file_name = os.path.split(str(src_path)) + + context = repre["context"] + ext = context.get("ext", context.get("representation")) + + if not ext: + msg = "Source extension not found, cannot find collection" + report_items[msg].append(src_path) + log.warning("{} <{}>".format(msg, context)) + return report_items, 0 + + ext = "."
+ ext + # context.representation could be .psd + ext = ext.replace("..", ".") + + src_collections, remainder = clique.assemble(os.listdir(dir_path)) + src_collection = None + for col in src_collections: + if col.tail != ext: + continue + + src_collection = col + break + + if src_collection is None: + msg = "Source collection of files was not found" + report_items[msg].append(src_path) + log.warning("{} <{}>".format(msg, src_path)) + return report_items, 0 + + frame_indicator = "@####@" + + anatomy_data["frame"] = frame_indicator + anatomy_filled = anatomy.format(anatomy_data) + + if format_dict: + template_result = anatomy_filled["delivery"][template_name] + delivery_path = template_result.rootless.format(**format_dict) + else: + delivery_path = anatomy_filled["delivery"][template_name] + + delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) + delivery_folder = os.path.dirname(delivery_path) + dst_head, dst_tail = delivery_path.split(frame_indicator) + dst_padding = src_collection.padding + dst_collection = clique.Collection( + head=dst_head, + tail=dst_tail, + padding=dst_padding + ) + + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + src_head = src_collection.head + src_tail = src_collection.tail + uploaded = 0 + for index in src_collection.indexes: + src_padding = src_collection.format("{padding}") % index + src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + src = os.path.normpath( + os.path.join(dir_path, src_file_name) + ) + + dst_padding = dst_collection.format("{padding}") % index + dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) + log.debug("Copying single: {} -> {}".format(src, dst)) + _copy_file(src, dst) + uploaded += 1 + + return report_items, uploaded diff --git a/openpype/pipeline/editorial.py b/openpype/pipeline/editorial.py index f62a1842e0..564d78ea6f 100644 --- a/openpype/pipeline/editorial.py +++ b/openpype/pipeline/editorial.py @@ -263,16 +263,17 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end): "retime": True, "speed": time_scalar, "timewarps": time_warp_nodes, - "handleStart": round(handle_start), - "handleEnd": round(handle_end) + "handleStart": int(round(handle_start)), + "handleEnd": int(round(handle_end)) } } returning_dict = { "mediaIn": media_in_trimmed, "mediaOut": media_out_trimmed, - "handleStart": round(handle_start), - "handleEnd": round(handle_end) + "handleStart": int(round(handle_start)), + "handleEnd": int(round(handle_end)), + "speed": time_scalar } # add version data only if retime diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index b6bdd13d50..e96f64f2a4 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -1,8 +1,11 @@ from .utils import ( HeroVersionType, + IncompatibleLoaderError, + InvalidRepresentationContext, get_repres_contexts, + get_contexts_for_repre_docs, get_subset_contexts, get_representation_context, @@ -20,6 +23,7 @@ from .utils import ( get_representation_path_from_context, get_representation_path, + get_representation_path_with_anatomy, is_compatible_loader, @@ -46,9 +50,12 @@ from .plugins import ( __all__ = ( # utils.py "HeroVersionType", + "IncompatibleLoaderError", + "InvalidRepresentationContext", "get_repres_contexts", + "get_contexts_for_repre_docs", "get_subset_contexts", "get_representation_context", @@ -66,6 +73,7 @@ __all__ = ( "get_representation_path_from_context", "get_representation_path", + "get_representation_path_with_anatomy", "is_compatible_loader", 
diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 99d6876d4b..784d4628f3 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -23,6 +23,10 @@ from openpype.client import ( get_representation_by_name, get_representation_parents ) +from openpype.lib import ( + StringTemplate, + TemplateUnsolved, +) from openpype.pipeline import ( schema, legacy_io, @@ -33,7 +37,7 @@ log = logging.getLogger(__name__) ContainersFilterResult = collections.namedtuple( "ContainersFilterResult", - ["latest", "outdated", "not_foud", "invalid"] + ["latest", "outdated", "not_found", "invalid"] ) @@ -61,6 +65,11 @@ class IncompatibleLoaderError(ValueError): pass +class InvalidRepresentationContext(ValueError): + """Representation path can't be received using representation document.""" + pass + + def get_repres_contexts(representation_ids, dbcon=None): """Return parenthood context for representation. @@ -78,13 +87,20 @@ if not dbcon: dbcon = legacy_io - contexts = {} if not representation_ids: - return contexts + return {} project_name = dbcon.active_project() repre_docs = get_representations(project_name, representation_ids) + return get_contexts_for_repre_docs(project_name, repre_docs) + + +def get_contexts_for_repre_docs(project_name, repre_docs): + contexts = {} + if not repre_docs: + return contexts + repre_docs_by_id = {} version_ids = set() for repre_doc in repre_docs: @@ -515,6 +531,52 @@ return get_representation_path(representation, root) +def get_representation_path_with_anatomy(repre_doc, anatomy): + """Receive representation path using representation document and anatomy. + + Anatomy is used to replace 'root' key in representation file. Ideally + this should be used instead of 'get_representation_path' which is based + on "current context". + + Future notes: + We also want to be able to store resources in the representation, and + the result should then also contain paths to possible resources. + + Args: + repre_doc (Dict[str, Any]): Representation document. + anatomy (Anatomy): Project anatomy object. + + Returns: + Union[None, TemplateResult]: None if path can't be received. + + Raises: + InvalidRepresentationContext: When representation data are probably + invalid or not available. + """ + + try: + template = repre_doc["data"]["template"] + + except KeyError: + raise InvalidRepresentationContext(( + "Representation document does not" + " contain template in data ('data.template')" + )) + + try: + context = repre_doc["context"] + context["root"] = anatomy.roots + path = StringTemplate.format_strict_template(template, context) + + except TemplateUnsolved as exc: + raise InvalidRepresentationContext(( + "Couldn't resolve representation template with available data."
+ " Reason: {}".format(str(exc)) + )) + + return path.normalized() + + def get_representation_path(representation, root=None, dbcon=None): """Get filename from representation document @@ -533,8 +595,6 @@ def get_representation_path(representation, root=None, dbcon=None): """ - from openpype.lib import StringTemplate, TemplateUnsolved - if dbcon is None: dbcon = legacy_io @@ -737,6 +797,7 @@ def get_outdated_containers(host=None, project_name=None): if host is None: from openpype.pipeline import registered_host + host = registered_host() if project_name is None: @@ -754,7 +815,7 @@ def filter_containers(containers, project_name): Categories are 'latest', 'outdated', 'invalid' and 'not_found'. The 'lastest' containers are from last version, 'outdated' are not, - 'invalid' are invalid containers (invalid content) and 'not_foud' has + 'invalid' are invalid containers (invalid content) and 'not_found' has some missing entity in database. Args: diff --git a/openpype/pipeline/plugin_discover.py b/openpype/pipeline/plugin_discover.py index 004e530b1c..7edd9ac290 100644 --- a/openpype/pipeline/plugin_discover.py +++ b/openpype/pipeline/plugin_discover.py @@ -2,7 +2,7 @@ import os import inspect import traceback -from openpype.api import Logger +from openpype.lib import Logger from openpype.lib.python_module_tools import ( modules_from_path, classes_from_module, diff --git a/openpype/pipeline/project_folders.py b/openpype/pipeline/project_folders.py new file mode 100644 index 0000000000..1bcba5c320 --- /dev/null +++ b/openpype/pipeline/project_folders.py @@ -0,0 +1,107 @@ +import os +import re +import json + +import six + +from openpype.settings import get_project_settings +from openpype.lib import Logger + +from .anatomy import Anatomy +from .template_data import get_project_template_data + + +def concatenate_splitted_paths(split_paths, anatomy): + log = Logger.get_logger("concatenate_splitted_paths") + pattern_array = re.compile(r"\[.*\]") + output = [] + for path_items in split_paths: + clean_items = [] + if isinstance(path_items, str): + path_items = [path_items] + + for path_item in path_items: + if not re.match(r"{.+}", path_item): + path_item = re.sub(pattern_array, "", path_item) + clean_items.append(path_item) + + # backward compatibility + if "__project_root__" in path_items: + for root, root_path in anatomy.roots.items(): + if not os.path.exists(str(root_path)): + log.debug("Root {} path path {} not exist on \ + computer!".format(root, root_path)) + continue + clean_items = ["{{root[{}]}}".format(root), + r"{project[name]}"] + clean_items[1:] + output.append(os.path.normpath(os.path.sep.join(clean_items))) + continue + + output.append(os.path.normpath(os.path.sep.join(clean_items))) + + return output + + +def fill_paths(path_list, anatomy): + format_data = get_project_template_data(project_name=anatomy.project_name) + format_data["root"] = anatomy.roots + filled_paths = [] + + for path in path_list: + new_path = path.format(**format_data) + filled_paths.append(new_path) + + return filled_paths + + +def create_project_folders(project_name, basic_paths=None): + log = Logger.get_logger("create_project_folders") + anatomy = Anatomy(project_name) + if basic_paths is None: + basic_paths = get_project_basic_paths(project_name) + + if not basic_paths: + return + + concat_paths = concatenate_splitted_paths(basic_paths, anatomy) + filled_paths = fill_paths(concat_paths, anatomy) + + # Create folders + for path in filled_paths: + if os.path.exists(path): + log.debug("Folder already exists: 
{}".format(path)) + else: + log.debug("Creating folder: {}".format(path)) + os.makedirs(path) + + +def _list_path_items(folder_structure): + output = [] + for key, value in folder_structure.items(): + if not value: + output.append(key) + continue + + paths = _list_path_items(value) + for path in paths: + if not isinstance(path, (list, tuple)): + path = [path] + + item = [key] + item.extend(path) + output.append(item) + + return output + + +def get_project_basic_paths(project_name): + project_settings = get_project_settings(project_name) + folder_structure = ( + project_settings["global"]["project_folder_structure"] + ) + if not folder_structure: + return [] + + if isinstance(folder_structure, six.string_types): + folder_structure = json.loads(folder_structure) + return _list_path_items(folder_structure) diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index aa7fe0bdbf..04b1a66f3a 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -1,3 +1,10 @@ +from .constants import ( + ValidatePipelineOrder, + ValidateContentsOrder, + ValidateSceneOrder, + ValidateMeshOrder, +) + from .publish_plugins import ( AbstractMetaInstancePlugin, AbstractMetaContextPlugin, @@ -7,13 +14,27 @@ from .publish_plugins import ( KnownPublishError, OpenPypePyblishPluginMixin, OptionalPyblishPluginMixin, + + RepairAction, + RepairContextAction, + + Extractor, ) from .lib import ( + get_publish_template_name, + DiscoverResult, publish_plugins_discover, load_help_content_from_plugin, load_help_content_from_filepath, + + get_errored_instances_from_context, + get_errored_plugins_from_context, + + filter_instances_for_context_plugin, + context_plugin_should_run, + get_instance_staging_dir, ) from .abstract_expected_files import ExpectedFiles @@ -24,6 +45,11 @@ from .abstract_collect_render import ( __all__ = ( + "ValidatePipelineOrder", + "ValidateContentsOrder", + "ValidateSceneOrder", + "ValidateMeshOrder", + "AbstractMetaInstancePlugin", "AbstractMetaContextPlugin", @@ -33,11 +59,25 @@ __all__ = ( "OpenPypePyblishPluginMixin", "OptionalPyblishPluginMixin", + "RepairAction", + "RepairContextAction", + + "Extractor", + + "get_publish_template_name", + "DiscoverResult", "publish_plugins_discover", "load_help_content_from_plugin", "load_help_content_from_filepath", + "get_errored_instances_from_context", + "get_errored_plugins_from_context", + + "filter_instances_for_context_plugin", + "context_plugin_should_run", + "get_instance_staging_dir", + "ExpectedFiles", "RenderInstance", diff --git a/openpype/pipeline/publish/constants.py b/openpype/pipeline/publish/constants.py new file mode 100644 index 0000000000..dcd3445200 --- /dev/null +++ b/openpype/pipeline/publish/constants.py @@ -0,0 +1,7 @@ +import pyblish.api + + +ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 +ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1 +ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 +ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3 diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py new file mode 100644 index 0000000000..169eca2e5c --- /dev/null +++ b/openpype/pipeline/publish/contants.py @@ -0,0 +1,2 @@ +DEFAULT_PUBLISH_TEMPLATE = "publish" +DEFAULT_HERO_PUBLISH_TEMPLATE = "hero" diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index d5494cd8a4..c76671fa39 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -2,14 +2,198 @@ 
import os import sys import types import inspect +import copy +import tempfile import xml.etree.ElementTree import six import pyblish.plugin import pyblish.api -from openpype.lib import Logger -from openpype.settings import get_project_settings, get_system_settings +from openpype.lib import Logger, filter_profiles +from openpype.settings import ( + get_project_settings, + get_system_settings, +) + +from .contants import ( + DEFAULT_PUBLISH_TEMPLATE, + DEFAULT_HERO_PUBLISH_TEMPLATE, +) + + +def get_template_name_profiles( + project_name, project_settings=None, logger=None +): + """Receive profiles for publish template keys. + + At least one of the arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings (Dict[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." + )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["template_name_profiles"] + ) + if profiles: + return copy.deepcopy(profiles) + + # Use legacy approach for cases where new settings are not filled yet for + # the project + legacy_profiles = ( + project_settings + ["global"] + ["publish"] + ["IntegrateAssetNew"] + ["template_name_profiles"] + ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/template_name_profiles'." + ).format(project_name)) + + # Replace "tasks" key with "task_names" + profiles = [] + for profile in copy.deepcopy(legacy_profiles): + profile["task_names"] = profile.pop("tasks", []) + profiles.append(profile) + return profiles + + +def get_hero_template_name_profiles( + project_name, project_settings=None, logger=None +): + """Receive profiles for hero publish template keys. + + At least one of the arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings (Dict[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." + )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["hero_template_name_profiles"] + ) + if profiles: + return copy.deepcopy(profiles) + + # Use legacy approach for cases where new settings are not filled yet for + # the project + legacy_profiles = copy.deepcopy( + project_settings + ["global"] + ["publish"] + ["IntegrateHeroVersion"] + ["template_name_profiles"] + ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_hero_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to hero publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/" + "hero_template_name_profiles'."
+ ).format(project_name)) + return legacy_profiles + + +def get_publish_template_name( + project_name, + host_name, + family, + task_name, + task_type, + project_settings=None, + hero=False, + logger=None +): + """Get template name which should be used for passed context. + + Publish templates are filtered by host name, family, task name and + task type. + + The default template, used if profiles are not available or the matching + profile has an empty value, is defined by the 'DEFAULT_PUBLISH_TEMPLATE' + constant. + + Args: + project_name (str): Name of project where to look for settings. + host_name (str): Name of host integration. + family (str): Family for which the template should be found. + task_name (str): Task name the instance is working on. + task_type (str): Task type the instance is working on. + project_settings (Dict[str, Any]): Prepared project settings. + logger (logging.Logger): Custom logger used for 'filter_profiles' + function. + + Returns: + str: Template name which should be used for integration. + """ + + template = None + filter_criteria = { + "hosts": host_name, + "families": family, + "task_names": task_name, + "task_types": task_type, + } + if hero: + default_template = DEFAULT_HERO_PUBLISH_TEMPLATE + profiles = get_hero_template_name_profiles( + project_name, project_settings, logger + ) + + else: + profiles = get_template_name_profiles( + project_name, project_settings, logger + ) + default_template = DEFAULT_PUBLISH_TEMPLATE + + profile = filter_profiles(profiles, filter_criteria, logger=logger) + if profile: + template = profile["template_name"] + return template or default_template
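A hedged usage example; the project, host, family and task values are placeholders:

```python
template_name = get_publish_template_name(
    "demo_project", "maya", "render",
    task_name="lighting", task_type="Lighting"
)
# Falls back to DEFAULT_PUBLISH_TEMPLATE ("publish") when no profile matches.
```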
+ """ + + instances = list() + for result in context.data["results"]: + if result["instance"] is None: + # When instance is None we are on the "context" result + continue + + if result["error"]: + instances.append(result["instance"]) + + return instances + + +def get_errored_plugins_from_context(context): + """Collect failed plugins from pyblish context. + + Args: + context (pyblish.api.Context): Publish context where we're looking + for failed plugins. + + Returns: + List[pyblish.api.Plugin]: Plugins which failed during processing. + """ + + plugins = list() + results = context.data.get("results", []) + for result in results: + if result["success"] is True: + continue + plugins.append(result["plugin"]) + + return plugins + + +def filter_instances_for_context_plugin(plugin, context): + """Filter instances on context by context plugin filters. + + This is for cases when context plugin need similar filtering like instance + plugin have, but for some reason must run on context or should find out + if there is at least one instance with a family. + + Args: + plugin (pyblish.api.Plugin): Plugin with filters. + context (pyblish.api.Context): Pyblish context with insances. + + Returns: + Iterator[pyblish.lib.Instance]: Iteration of valid instances. + """ + + instances = [] + plugin_families = set() + all_families = False + if plugin.families: + instances = context + plugin_families = set(plugin.families) + all_families = "*" in plugin_families + + for instance in instances: + # Ignore inactive instances + if ( + not instance.data.get("publish", True) + or not instance.data.get("active", True) + ): + continue + + family = instance.data.get("family") + families = instance.data.get("families") or [] + if ( + all_families + or (family and family in plugin_families) + or any(f in plugin_families for f in families) + ): + yield instance + + +def context_plugin_should_run(plugin, context): + """Return whether the ContextPlugin should run on the given context. + + This is a helper function to work around a bug pyblish-base#250 + Whenever a ContextPlugin sets specific families it will still trigger even + when no instances are present that have those families. + + This actually checks it correctly and returns whether it should run. + + Args: + plugin (pyblish.api.Plugin): Plugin with filters. + context (pyblish.api.Context): Pyblish context with insances. + + Returns: + bool: Context plugin should run based on valid instances. + """ + + for _ in filter_instances_for_context_plugin(plugin, context): + return True + return False + + +def get_instance_staging_dir(instance): + """Unified way how staging dir is stored and created on instances. + + First check if 'stagingDir' is already set in instance data. If there is + not create new in tempdir. + + Note: + Staging dir does not have to be necessarily in tempdir so be carefull + about it's usage. + + Args: + instance (pyblish.lib.Instance): Instance for which we want to get + staging dir. + + Returns: + str: Path to staging dir of instance. 
+ """ + + staging_dir = instance.data.get("stagingDir") + if not staging_dir: + staging_dir = os.path.normpath( + tempfile.mkdtemp(prefix="pyblish_tmp_") + ) + instance.data["stagingDir"] = staging_dir + + return staging_dir diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 71a2c675b6..6e2be1ce2c 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,7 +1,16 @@ from abc import ABCMeta + +import pyblish.api from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin + from openpype.lib import BoolDef -from .lib import load_help_content_from_plugin + +from .lib import ( + load_help_content_from_plugin, + get_errored_instances_from_context, + get_errored_plugins_from_context, + get_instance_staging_dir, +) class AbstractMetaInstancePlugin(ABCMeta, MetaPlugin): @@ -184,3 +193,74 @@ class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): if active is None: active = getattr(self, "active", True) return active + + +class RepairAction(pyblish.api.Action): + """Repairs the action + + To process the repairing this requires a static `repair(instance)` method + is available on the plugin. + """ + + label = "Repair" + on = "failed" # This action is only available on a failed plug-in + icon = "wrench" # Icon from Awesome Icon + + def process(self, context, plugin): + if not hasattr(plugin, "repair"): + raise RuntimeError("Plug-in does not have repair method.") + + # Get the errored instances + self.log.info("Finding failed instances..") + errored_instances = get_errored_instances_from_context(context) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + for instance in instances: + plugin.repair(instance) + + +class RepairContextAction(pyblish.api.Action): + """Repairs the action + + To process the repairing this requires a static `repair(instance)` method + is available on the plugin. + """ + + label = "Repair" + on = "failed" # This action is only available on a failed plug-in + + def process(self, context, plugin): + if not hasattr(plugin, "repair"): + raise RuntimeError("Plug-in does not have repair method.") + + # Get the errored instances + self.log.info("Finding failed instances..") + errored_plugins = get_errored_plugins_from_context(context) + + # Apply pyblish.logic to get the instances for the plug-in + if plugin in errored_plugins: + self.log.info("Attempting fix ...") + plugin.repair(context) + + +class Extractor(pyblish.api.InstancePlugin): + """Extractor base class. + + The extractor base class implements a "staging_dir" function used to + generate a temporary directory for an instance to extract to. 
+ + +class Extractor(pyblish.api.InstancePlugin): + """Extractor base class. + + The extractor base class implements a "staging_dir" function used to + generate a temporary directory for an instance to extract to. + + This temporary directory is generated through `tempfile.mkdtemp()`. + + """ + + order = 2.0 + + def staging_dir(self, instance): + """Provide a temporary directory in which to store extracted files. + + Upon calling this method the staging directory is stored inside + instance.data['stagingDir']. + """ + + return get_instance_staging_dir(instance) diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py index 824a25127c..627eba5c3d 100644 --- a/openpype/pipeline/template_data.py +++ b/openpype/pipeline/template_data.py @@ -28,27 +28,37 @@ def get_general_template_data(system_settings=None): } -def get_project_template_data(project_doc): +def get_project_template_data(project_doc=None, project_name=None): """Extract data from project document that are used in templates. Project document must have 'name' and (at this moment) optional key 'data.code'. + One of 'project_name' or 'project_doc' must be passed. With a prepared + project document the function is much faster because it does not have + to query. + Output contains formatting keys: - 'project[name]' - Project name - 'project[code]' - Project code Args: project_doc (Dict[str, Any]): Queried project document. + project_name (str): Name of project. Returns: Dict[str, Dict[str, str]]: Template data based on project document. """ + if not project_name: + project_name = project_doc["name"] + + if not project_doc: + project_doc = get_project(project_name, fields=["data.code"]) + project_code = project_doc.get("data", {}).get("code") return { "project": { - "name": project_doc["name"], + "name": project_name, "code": project_code } } diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py index eb383b16d9..39f3e17893 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -4,6 +4,7 @@ import logging from openpype.client import get_project from .
import legacy_io +from .anatomy import Anatomy from .plugin_discover import ( discover, register_plugin, @@ -73,19 +74,20 @@ class ThumbnailResolver(object): class TemplateResolver(ThumbnailResolver): - priority = 90 def process(self, thumbnail_entity, thumbnail_type): - - if not os.environ.get("AVALON_THUMBNAIL_ROOT"): - return - template = thumbnail_entity["data"].get("template") if not template: self.log.debug("Thumbnail entity does not have set template") return + thumbnail_root_format_key = "{thumbnail_root}" + thumbnail_root = os.environ.get("AVALON_THUMBNAIL_ROOT") or "" + # Check if template requires thumbnail root and if it is available + if thumbnail_root_format_key in template and not thumbnail_root: + return + project_name = self.dbcon.active_project() project = get_project(project_name, fields=["name", "data.code"]) @@ -95,12 +97,16 @@ class TemplateResolver(ThumbnailResolver): template_data.update({ "_id": str(thumbnail_entity["_id"]), "thumbnail_type": thumbnail_type, - "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), + "thumbnail_root": thumbnail_root, "project": { "name": project["name"], "code": project["data"].get("code") - } + }, }) + # Add anatomy roots if used in template + if "{root" in template: + anatomy = Anatomy(project_name) + template_data["root"] = anatomy.roots try: filepath = os.path.normpath(template.format(**template_data)) diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index 0aad29b6f9..94ecc81bd6 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -9,6 +9,8 @@ from .path_resolving import ( get_custom_workfile_template, get_custom_workfile_template_by_string_context, + + create_workdir_extra_folders, ) from .build_workfile import BuildWorkfile @@ -26,5 +28,7 @@ __all__ = ( "get_custom_workfile_template", "get_custom_workfile_template_by_string_context", + "create_workdir_extra_folders", + "BuildWorkfile", ) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py deleted file mode 100644 index 05a98a1ddc..0000000000 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ /dev/null @@ -1,526 +0,0 @@ -import os -from abc import ABCMeta, abstractmethod - -import six -import logging -from functools import reduce - -from openpype.client import get_asset_by_name -from openpype.settings import get_project_settings -from openpype.lib import ( - StringTemplate, - Logger, - filter_profiles, - get_linked_assets, -) -from openpype.pipeline import legacy_io, Anatomy -from openpype.pipeline.load import ( - get_loaders_by_name, - get_representation_context, - load_with_repre_context, -) - -from .build_template_exceptions import ( - TemplateAlreadyImported, - TemplateLoadingFailed, - TemplateProfileNotFound, - TemplateNotFound -) - -log = logging.getLogger(__name__) - - -def update_representations(entities, entity): - if entity['context']['subset'] not in entities: - entities[entity['context']['subset']] = entity - else: - current = entities[entity['context']['subset']] - incomming = entity - entities[entity['context']['subset']] = max( - current, incomming, - key=lambda entity: entity["context"].get("version", -1)) - - return entities - - -def parse_loader_args(loader_args): - if not loader_args: - return dict() - try: - parsed_args = eval(loader_args) - if not isinstance(parsed_args, dict): - return dict() - else: - return parsed_args - except Exception as err: - print( - "Error while parsing
loader arguments '{}'.\n{}: {}\n\n" - "Continuing with default arguments. . .".format( - loader_args, - err.__class__.__name__, - err)) - return dict() - - -@six.add_metaclass(ABCMeta) -class AbstractTemplateLoader: - """ - Abstraction of Template Loader. - Properties: - template_path : property to get current template path - Methods: - import_template : Abstract Method. Used to load template, - depending on current host - get_template_nodes : Abstract Method. Used to query nodes acting - as placeholders. Depending on current host - """ - - _log = None - - def __init__(self, placeholder_class): - # TODO template loader should expect host as and argument - # - host have all responsibility for most of code (also provide - # placeholder class) - # - also have responsibility for current context - # - this won't work in DCCs where multiple workfiles with - # different contexts can be opened at single time - # - template loader should have ability to change context - project_name = legacy_io.active_project() - asset_name = legacy_io.Session["AVALON_ASSET"] - - self.loaders_by_name = get_loaders_by_name() - self.current_asset = asset_name - self.project_name = project_name - self.host_name = legacy_io.Session["AVALON_APP"] - self.task_name = legacy_io.Session["AVALON_TASK"] - self.placeholder_class = placeholder_class - self.current_asset_doc = get_asset_by_name(project_name, asset_name) - self.task_type = ( - self.current_asset_doc - .get("data", {}) - .get("tasks", {}) - .get(self.task_name, {}) - .get("type") - ) - - self.log.info( - "BUILDING ASSET FROM TEMPLATE :\n" - "Starting templated build for {asset} in {project}\n\n" - "Asset : {asset}\n" - "Task : {task_name} ({task_type})\n" - "Host : {host}\n" - "Project : {project}\n".format( - asset=self.current_asset, - host=self.host_name, - project=self.project_name, - task_name=self.task_name, - task_type=self.task_type - )) - # Skip if there is no loader - if not self.loaders_by_name: - self.log.warning( - "There is no registered loaders. No assets will be loaded") - return - - @property - def log(self): - if self._log is None: - self._log = Logger.get_logger(self.__class__.__name__) - return self._log - - def template_already_imported(self, err_msg): - """In case template was already loaded. - Raise the error as a default action. - Override this method in your template loader implementation - to manage this case.""" - self.log.error("{}: {}".format( - err_msg.__class__.__name__, - err_msg)) - raise TemplateAlreadyImported(err_msg) - - def template_loading_failed(self, err_msg): - """In case template loading failed - Raise the error as a default action. - Override this method in your template loader implementation - to manage this case. - """ - self.log.error("{}: {}".format( - err_msg.__class__.__name__, - err_msg)) - raise TemplateLoadingFailed(err_msg) - - @property - def template_path(self): - """ - Property returning template path. Avoiding setter. - Getting template path from open pype settings based on current avalon - session and solving the path variables if needed. 
- Returns: - str: Solved template path - Raises: - TemplateProfileNotFound: No profile found from settings for - current avalon session - KeyError: Could not solve path because a key does not exists - in avalon context - TemplateNotFound: Solved path does not exists on current filesystem - """ - project_name = self.project_name - host_name = self.host_name - task_name = self.task_name - task_type = self.task_type - - anatomy = Anatomy(project_name) - project_settings = get_project_settings(project_name) - - build_info = project_settings[host_name]["templated_workfile_build"] - profile = filter_profiles( - build_info["profiles"], - { - "task_types": task_type, - "tasks": task_name - } - ) - - if not profile: - raise TemplateProfileNotFound( - "No matching profile found for task '{}' of type '{}' " - "with host '{}'".format(task_name, task_type, host_name) - ) - - path = profile["path"] - if not path: - raise TemplateLoadingFailed( - "Template path is not set.\n" - "Path need to be set in {}\\Template Workfile Build " - "Settings\\Profiles".format(host_name.title())) - - # Try fill path with environments and anatomy roots - fill_data = { - key: value - for key, value in os.environ.items() - } - fill_data["root"] = anatomy.roots - result = StringTemplate.format_template(path, fill_data) - if result.solved: - path = result.normalized() - - if path and os.path.exists(path): - self.log.info("Found template at: '{}'".format(path)) - return path - - solved_path = None - while True: - try: - solved_path = anatomy.path_remapper(path) - except KeyError as missing_key: - raise KeyError( - "Could not solve key '{}' in template path '{}'".format( - missing_key, path)) - - if solved_path is None: - solved_path = path - if solved_path == path: - break - path = solved_path - - solved_path = os.path.normpath(solved_path) - if not os.path.exists(solved_path): - raise TemplateNotFound( - "Template found in openPype settings for task '{}' with host " - "'{}' does not exists. 
(Not found : {})".format( - task_name, host_name, solved_path)) - - self.log.info("Found template at: '{}'".format(solved_path)) - - return solved_path - - def populate_template(self, ignored_ids=None): - """ - Use template placeholders to load assets and parent them in hierarchy - Arguments : - ignored_ids : - Returns: - None - """ - - loaders_by_name = self.loaders_by_name - current_asset_doc = self.current_asset_doc - linked_assets = get_linked_assets(current_asset_doc) - - ignored_ids = ignored_ids or [] - placeholders = self.get_placeholders() - self.log.debug("Placeholders found in template: {}".format( - [placeholder.name for placeholder in placeholders] - )) - for placeholder in placeholders: - self.log.debug("Start to processing placeholder {}".format( - placeholder.name - )) - placeholder_representations = self.get_placeholder_representations( - placeholder, - current_asset_doc, - linked_assets - ) - - if not placeholder_representations: - self.log.info( - "There's no representation for this placeholder: " - "{}".format(placeholder.name) - ) - continue - - for representation in placeholder_representations: - self.preload(placeholder, loaders_by_name, representation) - - if self.load_data_is_incorrect( - placeholder, - representation, - ignored_ids): - continue - - self.log.info( - "Loading {}_{} with loader {}\n" - "Loader arguments used : {}".format( - representation['context']['asset'], - representation['context']['subset'], - placeholder.loader_name, - placeholder.loader_args)) - - try: - container = self.load( - placeholder, loaders_by_name, representation) - except Exception: - self.load_failed(placeholder, representation) - else: - self.load_succeed(placeholder, container) - finally: - self.postload(placeholder) - - def get_placeholder_representations( - self, placeholder, current_asset_doc, linked_asset_docs - ): - placeholder_representations = placeholder.get_representations( - current_asset_doc, - linked_asset_docs - ) - for repre_doc in reduce( - update_representations, - placeholder_representations, - dict() - ).values(): - yield repre_doc - - def load_data_is_incorrect( - self, placeholder, last_representation, ignored_ids): - if not last_representation: - self.log.warning(placeholder.err_message()) - return True - if (str(last_representation['_id']) in ignored_ids): - print("Ignoring : ", last_representation['_id']) - return True - return False - - def preload(self, placeholder, loaders_by_name, last_representation): - pass - - def load(self, placeholder, loaders_by_name, last_representation): - repre = get_representation_context(last_representation) - return load_with_repre_context( - loaders_by_name[placeholder.loader_name], - repre, - options=parse_loader_args(placeholder.loader_args)) - - def load_succeed(self, placeholder, container): - placeholder.parent_in_hierarchy(container) - - def load_failed(self, placeholder, last_representation): - self.log.warning( - "Got error trying to load {}:{} with {}".format( - last_representation['context']['asset'], - last_representation['context']['subset'], - placeholder.loader_name - ), - exc_info=True - ) - - def postload(self, placeholder): - placeholder.clean() - - def update_missing_containers(self): - loaded_containers_ids = self.get_loaded_containers_by_id() - self.populate_template(ignored_ids=loaded_containers_ids) - - def get_placeholders(self): - placeholders = map(self.placeholder_class, self.get_template_nodes()) - valid_placeholders = filter( - lambda i: i.is_valid, - placeholders - ) - sorted_placeholders = 
list(sorted( - valid_placeholders, - key=lambda i: i.order - )) - return sorted_placeholders - - @abstractmethod - def get_loaded_containers_by_id(self): - """ - Collect already loaded containers for updating scene - Return: - dict (string, node): A dictionnary id as key - and containers as value - """ - pass - - @abstractmethod - def import_template(self, template_path): - """ - Import template in current host - Args: - template_path (str): fullpath to current task and - host's template file - Return: - None - """ - pass - - @abstractmethod - def get_template_nodes(self): - """ - Returning a list of nodes acting as host placeholders for - templating. The data representation is by user. - AbstractLoadTemplate (and LoadTemplate) won't directly manipulate nodes - Args : - None - Returns: - list(AnyNode): Solved template path - """ - pass - - -@six.add_metaclass(ABCMeta) -class AbstractPlaceholder: - """Abstraction of placeholders logic. - - Properties: - required_keys: A list of mandatory keys to decribe placeholder - and assets to load. - optional_keys: A list of optional keys to decribe - placeholder and assets to load - loader_name: Name of linked loader to use while loading assets - - Args: - identifier (str): Placeholder identifier. Should be possible to be - used as identifier in "a scene" (e.g. unique node name). - """ - - required_keys = { - "builder_type", - "family", - "representation", - "order", - "loader", - "loader_args" - } - optional_keys = {} - - def __init__(self, identifier): - self._log = None - self._name = identifier - self.get_data(identifier) - - @property - def log(self): - if self._log is None: - self._log = Logger.get_logger(repr(self)) - return self._log - - def __repr__(self): - return "< {} {} >".format(self.__class__.__name__, self.name) - - @property - def name(self): - return self._name - - @property - def loader_args(self): - return self.data["loader_args"] - - @property - def builder_type(self): - return self.data["builder_type"] - - @property - def order(self): - return self.data["order"] - - @property - def loader_name(self): - """Return placeholder loader name. - - Returns: - str: Loader name that will be used to load placeholder - representations. - """ - - return self.data["loader"] - - @property - def is_valid(self): - """Test validity of placeholder. - - i.e.: every required key exists in placeholder data - - Returns: - bool: True if every key is in data - """ - - if set(self.required_keys).issubset(self.data.keys()): - self.log.debug("Valid placeholder : {}".format(self.name)) - return True - self.log.info("Placeholder is not valid : {}".format(self.name)) - return False - - @abstractmethod - def parent_in_hierarchy(self, container): - """Place loaded container in correct hierarchy given by placeholder - - Args: - container (Dict[str, Any]): Loaded container created by loader. - """ - - pass - - @abstractmethod - def clean(self): - """Clean placeholder from hierarchy after loading assets.""" - - pass - - @abstractmethod - def get_representations(self, current_asset_doc, linked_asset_docs): - """Query representations based on placeholder data. - - Args: - current_asset_doc (Dict[str, Any]): Document of current - context asset. - linked_asset_docs (List[Dict[str, Any]]): Documents of assets - linked to current context asset. - - Returns: - Iterable[Dict[str, Any]]: Representations that are matching - placeholder filters. - """ - - pass - - @abstractmethod - def get_data(self, identifier): - """Collect information about placeholder by identifier. 
- - Args: - identifier (str): A unique placeholder identifier defined by - implementation. - """ - - pass diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py deleted file mode 100644 index e6396578c5..0000000000 --- a/openpype/pipeline/workfile/build_template.py +++ /dev/null @@ -1,68 +0,0 @@ -from importlib import import_module -from openpype.lib import classes_from_module -from openpype.host import HostBase -from openpype.pipeline import registered_host - -from .abstract_template_loader import ( - AbstractPlaceholder, - AbstractTemplateLoader) - -from .build_template_exceptions import ( - TemplateLoadingFailed, - TemplateAlreadyImported, - MissingHostTemplateModule, - MissingTemplatePlaceholderClass, - MissingTemplateLoaderClass -) - -_module_path_format = 'openpype.hosts.{host}.api.template_loader' - - -def build_workfile_template(*args): - template_loader = build_template_loader() - try: - template_loader.import_template(template_loader.template_path) - except TemplateAlreadyImported as err: - template_loader.template_already_imported(err) - except TemplateLoadingFailed as err: - template_loader.template_loading_failed(err) - else: - template_loader.populate_template() - - -def update_workfile_template(args): - template_loader = build_template_loader() - template_loader.update_missing_containers() - - -def build_template_loader(): - # TODO refactor to use advantage of 'HostBase' and don't import dynamically - # - hosts should have methods that gives option to return builders - host = registered_host() - if isinstance(host, HostBase): - host_name = host.name - else: - host_name = host.__name__.partition('.')[2] - module_path = _module_path_format.format(host=host_name) - module = import_module(module_path) - if not module: - raise MissingHostTemplateModule( - "No template loader found for host {}".format(host_name)) - - template_loader_class = classes_from_module( - AbstractTemplateLoader, - module - ) - template_placeholder_class = classes_from_module( - AbstractPlaceholder, - module - ) - - if not template_loader_class: - raise MissingTemplateLoaderClass() - template_loader_class = template_loader_class[0] - - if not template_placeholder_class: - raise MissingTemplatePlaceholderClass() - template_placeholder_class = template_placeholder_class[0] - return template_loader_class(template_placeholder_class) diff --git a/openpype/pipeline/workfile/build_template_exceptions.py b/openpype/pipeline/workfile/build_template_exceptions.py deleted file mode 100644 index 7a5075e3dc..0000000000 --- a/openpype/pipeline/workfile/build_template_exceptions.py +++ /dev/null @@ -1,35 +0,0 @@ -class MissingHostTemplateModule(Exception): - """Error raised when expected module does not exists""" - pass - - -class MissingTemplatePlaceholderClass(Exception): - """Error raised when module doesn't implement a placeholder class""" - pass - - -class MissingTemplateLoaderClass(Exception): - """Error raised when module doesn't implement a template loader class""" - pass - - -class TemplateNotFound(Exception): - """Exception raised when template does not exist.""" - pass - - -class TemplateProfileNotFound(Exception): - """Exception raised when current profile - doesn't match any template profile""" - pass - - -class TemplateAlreadyImported(Exception): - """Error raised when Template was already imported by host for - this session""" - pass - - -class TemplateLoadingFailed(Exception): - """Error raised whend Template loader was unable to load the template""" - pass 
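Note: the dynamic-import entry points deleted above ('build_template.py' and its exceptions module) are superseded by the new 'workfile_template_builder.py' module added later in this diff. A minimal sketch of the replacement entry point, assuming a hypothetical host-side subclass 'MyHostTemplateBuilder' of 'AbstractTemplateBuilder' (only 'registered_host' and 'build_template' come from this diff; the subclass name and its module are illustrative):

    from openpype.pipeline import registered_host
    # Hypothetical host-side implementation of 'AbstractTemplateBuilder'
    # which implements 'import_template' for the host.
    from my_host.api import MyHostTemplateBuilder

    def build_workfile_template():
        host = registered_host()
        builder = MyHostTemplateBuilder(host)
        # Resolves the template path from project settings profiles,
        # imports the template and populates placeholders in a loop.
        builder.build_template()
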
diff --git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py index bb6fcb4189..87b9df158f 100644 --- a/openpype/pipeline/workfile/build_workfile.py +++ b/openpype/pipeline/workfile/build_workfile.py @@ -1,3 +1,14 @@ +"""Workfile build based on settings. + +Workfile builder will do stuff based on project settings. Advantage is that +it need only access to settings. Disadvantage is that it is hard to focus +build per context and being explicit about loaded content. + +For more explicit workfile build is recommended 'AbstractTemplateBuilder' +from '~/openpype/pipeline/workfile/workfile_template_builder'. Which gives +more abilities to define how build happens but require more code to achive it. +""" + import os import re import collections @@ -8,10 +19,10 @@ from openpype.client import ( get_subsets, get_last_versions, get_representations, + get_linked_assets, ) from openpype.settings import get_project_settings from openpype.lib import ( - get_linked_assets, filter_profiles, Logger, ) diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py new file mode 100644 index 0000000000..fbec44247a --- /dev/null +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -0,0 +1,82 @@ +import os +import json +from uuid import uuid4 +from openpype.lib import Logger, filter_profiles +from openpype.lib.pype_info import get_workstation_info +from openpype.settings import get_project_settings + + +def _read_lock_file(lock_filepath): + if not os.path.exists(lock_filepath): + log = Logger.get_logger("_read_lock_file") + log.debug("lock file is not created or readable as expected!") + with open(lock_filepath, "r") as stream: + data = json.load(stream) + return data + + +def _get_lock_file(filepath): + return filepath + ".oplock" + + +def is_workfile_locked(filepath): + lock_filepath = _get_lock_file(filepath) + if not os.path.exists(lock_filepath): + return False + return True + + +def get_workfile_lock_data(filepath): + lock_filepath = _get_lock_file(filepath) + return _read_lock_file(lock_filepath) + + +def is_workfile_locked_for_current_process(filepath): + if not is_workfile_locked(filepath): + return False + + lock_filepath = _get_lock_file(filepath) + data = _read_lock_file(lock_filepath) + return data["process_id"] == _get_process_id() + + +def delete_workfile_lock(filepath): + lock_filepath = _get_lock_file(filepath) + if os.path.exists(lock_filepath): + os.remove(lock_filepath) + + +def create_workfile_lock(filepath): + lock_filepath = _get_lock_file(filepath) + info = get_workstation_info() + info["process_id"] = _get_process_id() + with open(lock_filepath, "w") as stream: + json.dump(info, stream) + + +def remove_workfile_lock(filepath): + if is_workfile_locked_for_current_process(filepath): + delete_workfile_lock(filepath) + + +def _get_process_id(): + process_id = os.environ.get("OPENPYPE_PROCESS_ID") + if not process_id: + process_id = str(uuid4()) + os.environ["OPENPYPE_PROCESS_ID"] = process_id + return process_id + + +def is_workfile_lock_enabled(host_name, project_name, project_setting=None): + if project_setting is None: + project_setting = get_project_settings(project_name) + workfile_lock_profiles = ( + project_setting + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_lock_profiles"]) + profile = filter_profiles(workfile_lock_profiles, {"host_name": host_name}) + if not profile: + return False + return profile["enabled"] diff --git a/openpype/pipeline/workfile/path_resolving.py 
b/openpype/pipeline/workfile/path_resolving.py index ed1d1d793e..801cb7223c 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -265,6 +265,10 @@ def get_last_workfile_with_version( if not match: continue + if not match.groups(): + output_filenames.append(filename) + continue + file_version = int(match.group(1)) if version is None or file_version > version: output_filenames[:] = [] @@ -419,9 +423,14 @@ def get_custom_workfile_template( # when path is available try to format it in case # there are some anatomy template strings if matching_item: + # extend anatomy context with os.environ to + # also allow formatting against env + full_context_data = os.environ.copy() + full_context_data.update(anatomy_context_data) + template = matching_item["path"][platform.system().lower()] return StringTemplate.format_strict_template( - template, anatomy_context_data + template, full_context_data ).normalized() return None @@ -462,3 +471,60 @@ def get_custom_workfile_template_by_string_context( return get_custom_workfile_template( project_doc, asset_doc, task_name, host_name, anatomy, project_settings ) + + +def create_workdir_extra_folders( + workdir, + host_name, + task_type, + task_name, + project_name, + project_settings=None +): + """Create extra folders in work directory based on context. + + Args: + workdir (str): Path to workdir where workfiles is stored. + host_name (str): Name of host implementation. + task_type (str): Type of task for which extra folders should be + created. + task_name (str): Name of task for which extra folders should be + created. + project_name (str): Name of project on which task is. + project_settings (dict): Prepared project settings. Are loaded if not + passed. + """ + + # Load project settings if not set + if not project_settings: + project_settings = get_project_settings(project_name) + + # Load extra folders profiles + extra_folders_profiles = ( + project_settings["global"]["tools"]["Workfiles"]["extra_folders"] + ) + # Skip if are empty + if not extra_folders_profiles: + return + + # Prepare profiles filters + filter_data = { + "task_types": task_type, + "task_names": task_name, + "hosts": host_name + } + profile = filter_profiles(extra_folders_profiles, filter_data) + if profile is None: + return + + for subfolder in profile["folders"]: + # Make sure backslashes are converted to forwards slashes + # and does not start with slash + subfolder = subfolder.replace("\\", "/").lstrip("/") + # Skip empty strings + if not subfolder: + continue + + fullpath = os.path.join(workdir, subfolder) + if not os.path.exists(fullpath): + os.makedirs(fullpath) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py new file mode 100644 index 0000000000..582657c735 --- /dev/null +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -0,0 +1,1451 @@ +"""Workfile build mechanism using workfile templates. + +Build templates are manually prepared using plugin definitions which create +placeholders inside the template which are populated on import. + +This approach is very explicit to achive very specific build logic that can be +targeted by task types and names. + +Placeholders are created using placeholder plugins which should care about +logic and data of placeholder items. 'PlaceholderItem' is used to keep track +about it's progress. 
+""" + +import os +import re +import collections +import copy +from abc import ABCMeta, abstractmethod + +import six + +from openpype.client import ( + get_asset_by_name, + get_linked_assets, + get_representations, +) +from openpype.settings import ( + get_project_settings, + get_system_settings, +) +from openpype.host import HostBase +from openpype.lib import ( + Logger, + StringTemplate, + filter_profiles, + attribute_definitions, +) +from openpype.lib.attribute_definitions import get_attributes_keys +from openpype.pipeline import legacy_io, Anatomy +from openpype.pipeline.load import ( + get_loaders_by_name, + get_contexts_for_repre_docs, + load_with_repre_context, +) +from openpype.pipeline.create import get_legacy_creator_by_name + + +class TemplateNotFound(Exception): + """Exception raised when template does not exist.""" + pass + + +class TemplateProfileNotFound(Exception): + """Exception raised when current profile + doesn't match any template profile""" + pass + + +class TemplateAlreadyImported(Exception): + """Error raised when Template was already imported by host for + this session""" + pass + + +class TemplateLoadFailed(Exception): + """Error raised whend Template loader was unable to load the template""" + pass + + +@six.add_metaclass(ABCMeta) +class AbstractTemplateBuilder(object): + """Abstraction of Template Builder. + + Builder cares about context, shared data, cache, discovery of plugins + and trigger logic. Provides public api for host workfile build systen. + + Rest of logic is based on plugins that care about collection and creation + of placeholder items. + + Population of placeholders happens in loops. Each loop will collect all + available placeholders, skip already populated, and populate the rest. + + Builder item has 2 types of shared data. Refresh lifetime which are cleared + on refresh and populate lifetime which are cleared after loop of + placeholder population. + + Args: + host (Union[HostBase, ModuleType]): Implementation of host. 
+ """ + + _log = None + + def __init__(self, host): + # Get host name + if isinstance(host, HostBase): + host_name = host.name + else: + host_name = os.environ.get("AVALON_APP") + + self._host = host + self._host_name = host_name + + # Shared data across placeholder plugins + self._shared_data = {} + self._shared_populate_data = {} + + # Where created objects of placeholder plugins will be stored + self._placeholder_plugins = None + self._loaders_by_name = None + self._creators_by_name = None + + self._system_settings = None + self._project_settings = None + + self._current_asset_doc = None + self._linked_asset_docs = None + self._task_type = None + + @property + def project_name(self): + return legacy_io.active_project() + + @property + def current_asset_name(self): + return legacy_io.Session["AVALON_ASSET"] + + @property + def current_task_name(self): + return legacy_io.Session["AVALON_TASK"] + + @property + def system_settings(self): + if self._system_settings is None: + self._system_settings = get_system_settings() + return self._system_settings + + @property + def project_settings(self): + if self._project_settings is None: + self._project_settings = get_project_settings(self.project_name) + return self._project_settings + + @property + def current_asset_doc(self): + if self._current_asset_doc is None: + self._current_asset_doc = get_asset_by_name( + self.project_name, self.current_asset_name + ) + return self._current_asset_doc + + @property + def linked_asset_docs(self): + if self._linked_asset_docs is None: + self._linked_asset_docs = get_linked_assets( + self.current_asset_doc + ) + return self._linked_asset_docs + + @property + def current_task_type(self): + asset_doc = self.current_asset_doc + if not asset_doc: + return None + return ( + asset_doc + .get("data", {}) + .get("tasks", {}) + .get(self.current_task_name, {}) + .get("type") + ) + + def get_placeholder_plugin_classes(self): + """Get placeholder plugin classes that can be used to build template. + + Default implementation looks for method + 'get_workfile_build_placeholder_plugins' on host. + + Returns: + List[PlaceholderPlugin]: Plugin classes available for host. + """ + + if hasattr(self._host, "get_workfile_build_placeholder_plugins"): + return self._host.get_workfile_build_placeholder_plugins() + return [] + + @property + def host(self): + """Access to host implementation. + + Returns: + Union[HostBase, ModuleType]: Implementation of host. + """ + + return self._host + + @property + def host_name(self): + """Name of 'host' implementation. + + Returns: + str: Host's name. 
+ """ + + return self._host_name + + @property + def log(self): + """Dynamically created logger for the plugin.""" + + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + def refresh(self): + """Reset cached data.""" + + self._placeholder_plugins = None + self._loaders_by_name = None + self._creators_by_name = None + + self._current_asset_doc = None + self._linked_asset_docs = None + self._task_type = None + + self._system_settings = None + self._project_settings = None + + self.clear_shared_data() + self.clear_shared_populate_data() + + def get_loaders_by_name(self): + if self._loaders_by_name is None: + self._loaders_by_name = get_loaders_by_name() + return self._loaders_by_name + + def get_creators_by_name(self): + if self._creators_by_name is None: + self._creators_by_name = get_legacy_creator_by_name() + return self._creators_by_name + + def get_shared_data(self, key): + """Receive shared data across plugins and placeholders. + + This can be used to scroll scene only once to look for placeholder + items if the storing is unified but each placeholder plugin would have + to call it again. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + return self._shared_data.get(key) + + def set_shared_data(self, key, value): + """Store share data across plugins and placeholders. + + Store data that can be afterwards accessed from any future call. It + is good practice to check if the same value is not already stored under + different key or if the key is not already used for something else. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + self._shared_data[key] = value + + def clear_shared_data(self): + """Clear shared data. + + Method only clear shared data to default state. + """ + + self._shared_data = {} + + def clear_shared_populate_data(self): + """Receive shared data across plugins and placeholders. + + These data are cleared after each loop of populating of template. + + This can be used to scroll scene only once to look for placeholder + items if the storing is unified but each placeholder plugin would have + to call it again. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + self._shared_populate_data = {} + + def get_shared_populate_data(self, key): + """Store share populate data across plugins and placeholders. + + These data are cleared after each loop of populating of template. + + Store data that can be afterwards accessed from any future call. It + is good practice to check if the same value is not already stored under + different key or if the key is not already used for something else. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + return self._shared_populate_data.get(key) + + def set_shared_populate_data(self, key, value): + """Store share populate data across plugins and placeholders. + + These data are cleared after each loop of populating of template. + + Store data that can be afterwards accessed from any future call. 
It + is good practice to check if the same value is not already stored under + different key or if the key is not already used for something else. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + self._shared_populate_data[key] = value + + @property + def placeholder_plugins(self): + """Access to initialized placeholder plugins. + + Returns: + List[PlaceholderPlugin]: Initialized plugins available for host. + """ + + if self._placeholder_plugins is None: + placeholder_plugins = {} + for cls in self.get_placeholder_plugin_classes(): + try: + plugin = cls(self) + placeholder_plugins[plugin.identifier] = plugin + + except Exception: + self.log.warning( + "Failed to initialize placeholder plugin {}".format( + cls.__name__ + ), + exc_info=True + ) + + self._placeholder_plugins = placeholder_plugins + return self._placeholder_plugins + + def create_placeholder(self, plugin_identifier, placeholder_data): + """Create new placeholder using plugin identifier and data. + + Args: + plugin_identifier (str): Identifier of plugin. That's how builder + know which plugin should be used. + placeholder_data (Dict[str, Any]): Placeholder item data. They + should match options required by the plugin. + + Returns: + PlaceholderItem: Created placeholder item. + """ + + plugin = self.placeholder_plugins[plugin_identifier] + return plugin.create_placeholder(placeholder_data) + + def get_placeholders(self): + """Collect placeholder items from scene. + + Each placeholder plugin can collect it's placeholders and return them. + This method does not use cached values but always go through the scene. + + Returns: + List[PlaceholderItem]: Sorted placeholder items. + """ + + placeholders = [] + for placeholder_plugin in self.placeholder_plugins.values(): + result = placeholder_plugin.collect_placeholders() + if result: + placeholders.extend(result) + + return list(sorted( + placeholders, + key=lambda i: i.order + )) + + def build_template(self, template_path=None, level_limit=None): + """Main callback for building workfile from template path. + + Todo: + Handle report of populated placeholders from + 'populate_scene_placeholders' to be shown to a user. + + Args: + template_path (str): Path to a template file with placeholders. + Template from settings 'get_template_path' used when not + passed. + level_limit (int): Limit of populate loops. Related to + 'populate_scene_placeholders' method. + """ + + if template_path is None: + template_path = self.get_template_path() + self.import_template(template_path) + self.populate_scene_placeholders(level_limit) + + def rebuild_template(self): + """Go through existing placeholders in scene and update them. + + This could not make sense for all plugin types so this is optional + logic for plugins. + + Note: + Logic is not importing the template again but using placeholders + that were already available. We should maybe change the method + name. + + Question: + Should this also handle subloops as it is possible that another + template is loaded during processing? 
+ """ + + if not self.placeholder_plugins: + self.log.info("There are no placeholder plugins available.") + return + + placeholders = self.get_placeholders() + if not placeholders: + self.log.info("No placeholders were found.") + return + + for placeholder in placeholders: + plugin = placeholder.plugin + plugin.repopulate_placeholder(placeholder) + + self.clear_shared_populate_data() + + @abstractmethod + def import_template(self, template_path): + """ + Import template in current host. + + Should load the content of template into scene so + 'populate_scene_placeholders' can be started. + + Args: + template_path (str): Fullpath for current task and + host's template file. + """ + + pass + + def _prepare_placeholders(self, placeholders): + """Run preparation part for placeholders on plugins. + + Args: + placeholders (List[PlaceholderItem]): Placeholder items that will + be processed. + """ + + # Prepare placeholder items by plugin + plugins_by_identifier = {} + placeholders_by_plugin_id = collections.defaultdict(list) + for placeholder in placeholders: + plugin = placeholder.plugin + identifier = plugin.identifier + plugins_by_identifier[identifier] = plugin + placeholders_by_plugin_id[identifier].append(placeholder) + + # Plugin should prepare data for passed placeholders + for identifier, placeholders in placeholders_by_plugin_id.items(): + plugin = plugins_by_identifier[identifier] + plugin.prepare_placeholders(placeholders) + + def populate_scene_placeholders(self, level_limit=None): + """Find placeholders in scene using plugins and process them. + + This should happen after 'import_template'. + + Collect available placeholders from scene. All of them are processed + after that shared data are cleared. Placeholder items are collected + again and if there are any new the loop happens again. This is possible + to change with defying 'level_limit'. + + Placeholders are marked as processed so they're not re-processed. To + identify which placeholders were already processed is used + placeholder's 'scene_identifier'. + + Args: + level_limit (int): Level of loops that can happen. Default is 1000. + """ + + if not self.placeholder_plugins: + self.log.warning("There are no placeholder plugins available.") + return + + placeholders = self.get_placeholders() + if not placeholders: + self.log.warning("No placeholders were found.") + return + + # Avoid infinite loop + # - 1000 iterations of placeholders processing must be enough + if not level_limit: + level_limit = 1000 + + placeholder_by_scene_id = { + placeholder.scene_identifier: placeholder + for placeholder in placeholders + } + all_processed = len(placeholders) == 0 + # Counter is checked at the ned of a loop so the loop happens at least + # once. + iter_counter = 0 + while not all_processed: + filtered_placeholders = [] + for placeholder in placeholders: + if placeholder.finished: + continue + + if placeholder.in_progress: + self.log.warning(( + "Placeholder that should be processed" + " is already in progress." 
+ )) + continue + filtered_placeholders.append(placeholder) + + self._prepare_placeholders(filtered_placeholders) + + for placeholder in filtered_placeholders: + placeholder.set_in_progress() + placeholder_plugin = placeholder.plugin + try: + placeholder_plugin.populate_placeholder(placeholder) + + except Exception as exc: + self.log.warning( + ( + "Failed to process placeholder {} with plugin {}" + ).format( + placeholder.scene_identifier, + placeholder_plugin.__class__.__name__ + ), + exc_info=True + ) + placeholder.set_failed(exc) + + placeholder.set_finished() + + # Clear shared data before getting new placeholders + self.clear_shared_populate_data() + + iter_counter += 1 + if iter_counter >= level_limit: + break + + all_processed = True + collected_placeholders = self.get_placeholders() + for placeholder in collected_placeholders: + identifier = placeholder.scene_identifier + if identifier in placeholder_by_scene_id: + continue + + all_processed = False + placeholder_by_scene_id[identifier] = placeholder + placeholders.append(placeholder) + + self.refresh() + + def _get_build_profiles(self): + """Get build profiles for workfile build template path. + + Returns: + List[Dict[str, Any]]: Profiles for template path resolving. + """ + + return ( + self.project_settings + [self.host_name] + ["templated_workfile_build"] + ["profiles"] + ) + + def get_template_path(self): + """Unified way how template path is received usign settings. + + Method is dependent on '_get_build_profiles' which should return filter + profiles to resolve path to a template. Default implementation looks + into host settings: + - 'project_settings/{host name}/templated_workfile_build/profiles' + + Returns: + str: Path to a template file with placeholders. + + Raises: + TemplateProfileNotFound: When profiles are not filled. + TemplateLoadFailed: Profile was found but path is not set. + TemplateNotFound: Path was set but file does not exists. 
+ """ + + host_name = self.host_name + project_name = self.project_name + task_name = self.current_task_name + task_type = self.current_task_type + + build_profiles = self._get_build_profiles() + profile = filter_profiles( + build_profiles, + { + "task_types": task_type, + "task_names": task_name + } + ) + + if not profile: + raise TemplateProfileNotFound(( + "No matching profile found for task '{}' of type '{}' " + "with host '{}'" + ).format(task_name, task_type, host_name)) + + path = profile["path"] + if not path: + raise TemplateLoadFailed(( + "Template path is not set.\n" + "Path need to be set in {}\\Template Workfile Build " + "Settings\\Profiles" + ).format(host_name.title())) + + # Try fill path with environments and anatomy roots + anatomy = Anatomy(project_name) + fill_data = { + key: value + for key, value in os.environ.items() + } + fill_data["root"] = anatomy.roots + result = StringTemplate.format_template(path, fill_data) + if result.solved: + path = result.normalized() + + if path and os.path.exists(path): + self.log.info("Found template at: '{}'".format(path)) + return path + + solved_path = None + while True: + try: + solved_path = anatomy.path_remapper(path) + except KeyError as missing_key: + raise KeyError( + "Could not solve key '{}' in template path '{}'".format( + missing_key, path)) + + if solved_path is None: + solved_path = path + if solved_path == path: + break + path = solved_path + + solved_path = os.path.normpath(solved_path) + if not os.path.exists(solved_path): + raise TemplateNotFound( + "Template found in openPype settings for task '{}' with host " + "'{}' does not exists. (Not found : {})".format( + task_name, host_name, solved_path)) + + self.log.info("Found template at: '{}'".format(solved_path)) + + return solved_path + + +@six.add_metaclass(ABCMeta) +class PlaceholderPlugin(object): + """Plugin which care about handling of placeholder items logic. + + Plugin create and update placeholders in scene and populate them on + template import. Populating means that based on placeholder data happens + a logic in the scene. Most common logic is to load representation using + loaders or to create instances in scene. + """ + + label = None + _log = None + + def __init__(self, builder): + self._builder = builder + + @property + def builder(self): + """Access to builder which initialized the plugin. + + Returns: + AbstractTemplateBuilder: Loader of template build. + """ + + return self._builder + + @property + def project_name(self): + return self._builder.project_name + + @property + def log(self): + """Dynamically created logger for the plugin.""" + + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + @property + def identifier(self): + """Identifier which will be stored to placeholder. + + Default implementation uses class name. + + Returns: + str: Unique identifier of placeholder plugin. + """ + + return self.__class__.__name__ + + @abstractmethod + def create_placeholder(self, placeholder_data): + """Create new placeholder in scene and get it's item. + + It matters on the plugin implementation if placeholder will use + selection in scene or create new node. + + Args: + placeholder_data (Dict[str, Any]): Data that were created + based on attribute definitions from 'get_placeholder_options'. + + Returns: + PlaceholderItem: Created placeholder item. + """ + + pass + + @abstractmethod + def update_placeholder(self, placeholder_item, placeholder_data): + """Update placeholder item with new data. 
+ + New data should be propagated to object of placeholder item itself + and also into the scene. + + Reason: + Some placeholder plugins may require some special way how the + updates should be propagated to object. + + Args: + placeholder_item (PlaceholderItem): Object of placeholder that + should be updated. + placeholder_data (Dict[str, Any]): Data related to placeholder. + Should match plugin options. + """ + + pass + + @abstractmethod + def collect_placeholders(self): + """Collect placeholders from scene. + + Returns: + List[PlaceholderItem]: Placeholder objects. + """ + + pass + + def get_placeholder_options(self, options=None): + """Placeholder options for data showed. + + Returns: + List[AbtractAttrDef]: Attribute definitions of placeholder options. + """ + + return [] + + def get_placeholder_keys(self): + """Get placeholder keys that are stored in scene. + + Returns: + Set[str]: Key of placeholder keys that are stored in scene. + """ + + option_keys = get_attributes_keys(self.get_placeholder_options()) + option_keys.add("plugin_identifier") + return option_keys + + def prepare_placeholders(self, placeholders): + """Preparation part of placeholders. + + Args: + placeholders (List[PlaceholderItem]): List of placeholders that + will be processed. + """ + + pass + + @abstractmethod + def populate_placeholder(self, placeholder): + """Process single placeholder item. + + Processing of placeholders is defined by their order thus can't be + processed in batch. + + Args: + placeholder (PlaceholderItem): Placeholder that should be + processed. + """ + + pass + + def repopulate_placeholder(self, placeholder): + """Update scene with current context for passed placeholder. + + Can be used to re-run placeholder logic (if it make sense). + """ + + pass + + def get_plugin_shared_data(self, key): + """Receive shared data across plugin and placeholders. + + Using shared data from builder but stored under plugin identifier. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + plugin_data = self.builder.get_shared_data(self.identifier) + if plugin_data is None: + return None + return plugin_data.get(key) + + def set_plugin_shared_data(self, key, value): + """Store share data across plugin and placeholders. + + Using shared data from builder but stored under plugin identifier. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + plugin_data = self.builder.get_shared_data(self.identifier) + if plugin_data is None: + plugin_data = {} + plugin_data[key] = value + self.builder.set_shared_data(self.identifier, plugin_data) + + def get_plugin_shared_populate_data(self, key): + """Receive shared data across plugin and placeholders. + + Using shared populate data from builder but stored under plugin + identifier. + + Shared populate data are cleaned up during populate while loop. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + plugin_data = self.builder.get_shared_populate_data(self.identifier) + if plugin_data is None: + return None + return plugin_data.get(key) + + def set_plugin_shared_populate_data(self, key, value): + """Store share data across plugin and placeholders. + + Using shared data from builder but stored under plugin identifier. + + Key should be self explanatory to content. 
+        - wrong: 'asset'
+        - good: 'asset_name'
+
+        Shared populate data are cleaned up during the populate while-loop.
+
+        Args:
+            key (str): Key under which is the value stored.
+            value (Any): Value that should be stored under the key.
+        """
+
+        plugin_data = self.builder.get_shared_populate_data(self.identifier)
+        if plugin_data is None:
+            plugin_data = {}
+        plugin_data[key] = value
+        self.builder.set_shared_populate_data(self.identifier, plugin_data)
+
+
+class PlaceholderItem(object):
+    """Item representing single item in scene that is a placeholder to process.
+
+    Items are always created and updated by their plugins. Each plugin can
+    use a modified class of 'PlaceholderItem', but only to add more options
+    instead of changing the base behavior.
+
+    Scene identifier is used to avoid processing of the placeholder item
+    multiple times, so it must be unique across the whole workfile builder.
+
+    Args:
+        scene_identifier (str): Unique scene identifier. If placeholder is
+            created from the same "node" it must have same identifier.
+        data (Dict[str, Any]): Data related to placeholder. They're defined
+            by plugin.
+        plugin (PlaceholderPlugin): Plugin which created the placeholder item.
+    """
+
+    default_order = 100
+
+    def __init__(self, scene_identifier, data, plugin):
+        self._log = None
+        self._scene_identifier = scene_identifier
+        self._data = data
+        self._plugin = plugin
+
+        # Keep track of the state of the placeholder process
+        self._state = 0
+
+        # Error messages to be shown in UI
+        # - all other messages should be logged
+        self._errors = []  # -> List[str]
+
+    @property
+    def plugin(self):
+        """Access to plugin which created placeholder.
+
+        Returns:
+            PlaceholderPlugin: Plugin object.
+        """
+
+        return self._plugin
+
+    @property
+    def builder(self):
+        """Access to builder.
+
+        Returns:
+            AbstractTemplateBuilder: Builder which is the top part of
+                placeholder.
+        """
+
+        return self.plugin.builder
+
+    @property
+    def data(self):
+        """Placeholder data which can modify how placeholder is processed.
+
+        Possible general keys
+        - order: Can define the order in which the placeholder is processed.
+            Lower == earlier.
+
+        Other keys are defined by the placeholder plugin, which should
+        validate them on item creation.
+
+        Returns:
+            Dict[str, Any]: Placeholder item data.
+        """
+
+        return self._data
+
+    def to_dict(self):
+        """Create copy of item's data.
+
+        Returns:
+            Dict[str, Any]: Placeholder data.
+        """
+
+        return copy.deepcopy(self.data)
+
+    @property
+    def log(self):
+        if self._log is None:
+            self._log = Logger.get_logger(repr(self))
+        return self._log
+
+    def __repr__(self):
+        return "< {} {} >".format(self.__class__.__name__, self.name)
+
+    @property
+    def order(self):
+        """Order of item processing."""
+
+        order = self._data.get("order")
+        if order is None:
+            return self.default_order
+        return order
+
+    @property
+    def scene_identifier(self):
+        return self._scene_identifier
+
+    @property
+    def finished(self):
+        """Item was already processed."""
+
+        return self._state == 2
+
+    @property
+    def in_progress(self):
+        """Processing is in progress."""
+
+        return self._state == 1
+
+    def set_in_progress(self):
+        """Change to in progress state."""
+
+        self._state = 1
+
+    def set_finished(self):
+        """Change to finished state."""
+
+        self._state = 2
+
+    def set_failed(self, exception):
+        self.add_error(str(exception))
+
+    def add_error(self, error):
+        """Add error message about failed placeholder processing."""
+
+        self._errors.append(error)
+
+    def get_errors(self):
+        """Error messages with which the placeholder processing failed.
+
+        Gives ability to access the error messages.
+        """
+
+        return self._errors
+
+
+class PlaceholderLoadMixin(object):
+    """Mixin prepared for loading placeholder plugins.
+
+    Implementation prepares options for placeholders with
+    'get_load_plugin_options'.
+
+    Placeholder population is implemented in 'populate_load_placeholder'.
+
+    PlaceholderItem can have implemented methods:
+    - 'load_failed' - called when loading of one representation failed
+    - 'load_succeed' - called when loading of one representation succeeded
+    """
+
+    def get_load_plugin_options(self, options=None):
+        """Unified attribute definitions for load placeholder.
+
+        Common function for placeholder plugins used for loading of
+        representations. Use it in 'get_placeholder_options'.
+
+        Args:
+            plugin (PlaceholderPlugin): Plugin used for loading of
+                representations.
+            options (Dict[str, Any]): Already available options which are
+                used as defaults for attributes.
+
+        Returns:
+            List[AbtractAttrDef]: Attribute definitions common for load
+                plugins.
+        """
+
+        loaders_by_name = self.builder.get_loaders_by_name()
+        loader_items = [
+            (loader_name, loader.label or loader_name)
+            for loader_name, loader in loaders_by_name.items()
+        ]
+
+        loader_items = list(sorted(loader_items, key=lambda i: i[1]))
+        options = options or {}
+        return [
+            attribute_definitions.UISeparatorDef(),
+            attribute_definitions.UILabelDef("Main attributes"),
+            attribute_definitions.UISeparatorDef(),
+
+            attribute_definitions.EnumDef(
+                "builder_type",
+                label="Asset Builder Type",
+                default=options.get("builder_type"),
+                items=[
+                    ("context_asset", "Current asset"),
+                    ("linked_asset", "Linked assets"),
+                    ("all_assets", "All assets")
+                ],
+                tooltip=(
+                    "Asset Builder Type\n"
+                    "\nBuilder type describes what the template loader will"
+                    " look for."
+                    "\ncontext_asset : Template loader will look for subsets"
+                    " of current context asset (e.g. for asset 'bob' it"
+                    " finds subsets of 'bob')"
+                    "\nlinked_asset : Template loader will look for assets"
+                    " linked to current context asset."
+                    "\nLinked assets are looked up in database under"
+                    " field \"inputLinks\""
+                )
+            ),
+            attribute_definitions.TextDef(
+                "family",
+                label="Family",
+                default=options.get("family"),
+                placeholder="model, look, ..."
+            ),
+            attribute_definitions.TextDef(
+                "representation",
+                label="Representation name",
+                default=options.get("representation"),
+                placeholder="ma, abc, ..."
+            ),
+            attribute_definitions.EnumDef(
+                "loader",
+                label="Loader",
+                default=options.get("loader"),
+                items=loader_items,
+                tooltip=(
+                    "Loader"
+                    "\nDefines what OpenPype loader will be used to"
+                    " load assets."
+                    "\nUsable loaders depend on current host's loader list."
+                    "\nField is case sensitive."
+                )
+            ),
+            attribute_definitions.TextDef(
+                "loader_args",
+                label="Loader Arguments",
+                default=options.get("loader_args"),
+                placeholder='{"camera":"persp", "lights":True}',
+                tooltip=(
+                    "Loader"
+                    "\nDefines a dictionary of arguments used to load assets."
+                    "\nUsable arguments depend on current placeholder Loader."
+                    "\nField should be a valid python dict."
+                    " Anything else will be ignored."
+                )
+            ),
+            attribute_definitions.NumberDef(
+                "order",
+                label="Order",
+                default=options.get("order") or 0,
+                decimals=0,
+                minimum=0,
+                maximum=999,
+                tooltip=(
+                    "Order"
+                    "\nOrder defines asset loading priority (0 to 999)"
+                    "\nPriority rule is : \"lowest is first to load\"."
+ ) + ), + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Optional attributes"), + attribute_definitions.UISeparatorDef(), + attribute_definitions.TextDef( + "asset", + label="Asset filter", + default=options.get("asset"), + placeholder="regex filtering by asset name", + tooltip=( + "Filtering assets by matching field regex to asset's name" + ) + ), + attribute_definitions.TextDef( + "subset", + label="Subset filter", + default=options.get("subset"), + placeholder="regex filtering by subset name", + tooltip=( + "Filtering assets by matching field regex to subset's name" + ) + ), + attribute_definitions.TextDef( + "hierarchy", + label="Hierarchy filter", + default=options.get("hierarchy"), + placeholder="regex filtering by asset's hierarchy", + tooltip=( + "Filtering assets by matching field asset's hierarchy" + ) + ) + ] + + def parse_loader_args(self, loader_args): + """Helper function to parse string of loader arugments. + + Empty dictionary is returned if conversion fails. + + Args: + loader_args (str): Loader args filled by user. + + Returns: + Dict[str, Any]: Parsed arguments used as dictionary. + """ + + if not loader_args: + return {} + + try: + parsed_args = eval(loader_args) + if isinstance(parsed_args, dict): + return parsed_args + + except Exception as err: + print( + "Error while parsing loader arguments '{}'.\n{}: {}\n\n" + "Continuing with default arguments. . .".format( + loader_args, err.__class__.__name__, err)) + + return {} + + def _get_representations(self, placeholder): + """Prepared query of representations based on load options. + + This function is directly connected to options defined in + 'get_load_plugin_options'. + + Note: + This returns all representation documents from all versions of + matching subset. To filter for last version use + '_reduce_last_version_repre_docs'. + + Args: + placeholder (PlaceholderItem): Item which should be populated. + + Returns: + List[Dict[str, Any]]: Representation documents matching filters + from placeholder data. 
+ """ + + project_name = self.builder.project_name + current_asset_doc = self.builder.current_asset_doc + linked_asset_docs = self.builder.linked_asset_docs + + builder_type = placeholder.data["builder_type"] + if builder_type == "context_asset": + context_filters = { + "asset": [current_asset_doc["name"]], + "subset": [re.compile(placeholder.data["subset"])], + "hierarchy": [re.compile(placeholder.data["hierarchy"])], + "representation": [placeholder.data["representation"]], + "family": [placeholder.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [re.compile(placeholder.data["asset"])], + "subset": [re.compile(placeholder.data["subset"])], + "hierarchy": [re.compile(placeholder.data["hierarchy"])], + "representation": [placeholder.data["representation"]], + "family": [placeholder.data["family"]] + } + + else: + asset_regex = re.compile(placeholder.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(placeholder.data["subset"])], + "hierarchy": [re.compile(placeholder.data["hierarchy"])], + "representation": [placeholder.data["representation"]], + "family": [placeholder.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) + + def _before_repre_load(self, placeholder, representation): + """Can be overriden. Is called before representation is loaded.""" + + pass + + def _reduce_last_version_repre_docs(self, representations): + """Reduce representations to last verison.""" + + mapping = {} + for repre_doc in representations: + repre_context = repre_doc["context"] + + asset_name = repre_context["asset"] + subset_name = repre_context["subset"] + version = repre_context.get("version", -1) + + if asset_name not in mapping: + mapping[asset_name] = {} + + subset_mapping = mapping[asset_name] + if subset_name not in subset_mapping: + subset_mapping[subset_name] = collections.defaultdict(list) + + version_mapping = subset_mapping[subset_name] + version_mapping[version].append(repre_doc) + + output = [] + for subset_mapping in mapping.values(): + for version_mapping in subset_mapping.values(): + last_version = tuple(sorted(version_mapping.keys()))[-1] + output.extend(version_mapping[last_version]) + return output + + def populate_load_placeholder(self, placeholder, ignore_repre_ids=None): + """Load placeholder is goind to load matching representations. + + Note: + Ignore repre ids is to avoid loading the same representation again + on load. But the representation can be loaded with different loader + and there could be published new version of matching subset for the + representation. We should maybe expect containers. + + Also import loaders don't have containers at all... + + Args: + placeholder (PlaceholderItem): Placeholder item with information + about requested representations. + ignore_repre_ids (Iterable[Union[str, ObjectId]]): Representation + ids that should be skipped. 
+ """ + + if ignore_repre_ids is None: + ignore_repre_ids = set() + + # TODO check loader existence + loader_name = placeholder.data["loader"] + loader_args = placeholder.data["loader_args"] + + placeholder_representations = self._get_representations(placeholder) + + filtered_representations = [] + for representation in self._reduce_last_version_repre_docs( + placeholder_representations + ): + repre_id = str(representation["_id"]) + if repre_id not in ignore_repre_ids: + filtered_representations.append(representation) + + if not filtered_representations: + self.log.info(( + "There's no representation for this placeholder: {}" + ).format(placeholder.scene_identifier)) + return + + repre_load_contexts = get_contexts_for_repre_docs( + self.project_name, filtered_representations + ) + loaders_by_name = self.builder.get_loaders_by_name() + for repre_load_context in repre_load_contexts.values(): + representation = repre_load_context["representation"] + repre_context = representation["context"] + self._before_repre_load( + placeholder, representation + ) + self.log.info( + "Loading {} from {} with loader {}\n" + "Loader arguments used : {}".format( + repre_context["subset"], + repre_context["asset"], + loader_name, + loader_args + ) + ) + try: + container = load_with_repre_context( + loaders_by_name[loader_name], + repre_load_context, + options=self.parse_loader_args(loader_args) + ) + + except Exception: + failed = True + self.load_failed(placeholder, representation) + + else: + failed = False + self.load_succeed(placeholder, container) + self.cleanup_placeholder(placeholder, failed) + + def load_failed(self, placeholder, representation): + if hasattr(placeholder, "load_failed"): + placeholder.load_failed(representation) + + def load_succeed(self, placeholder, container): + if hasattr(placeholder, "load_succeed"): + placeholder.load_succeed(container) + + def cleanup_placeholder(self, placeholder, failed): + """Cleanup placeholder after load of single representation. + + Can be called multiple times during placeholder item populating and is + called even if loading failed. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + + pass + + +class LoadPlaceholderItem(PlaceholderItem): + """PlaceholderItem for plugin which is loading representations. + + Connected to 'PlaceholderLoadMixin'. 
+ """ + + def __init__(self, *args, **kwargs): + super(LoadPlaceholderItem, self).__init__(*args, **kwargs) + self._failed_representations = [] + + def get_errors(self): + if not self._failed_representations: + return [] + message = ( + "Failed to load {} representations using Loader {}" + ).format( + len(self._failed_representations), + self.data["loader"] + ) + return [message] + + def load_failed(self, representation): + self._failed_representations.append(representation) diff --git a/openpype/plugin.py b/openpype/plugin.py index bb9bc2ff85..7e906b4451 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -1,24 +1,91 @@ -import tempfile -import os +import functools +import warnings + import pyblish.api +# New location of orders: openpype.pipeline.publish.constants +# - can be imported as +# 'from openpype.pipeline.publish import ValidatePipelineOrder' ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1 ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3 +class PluginDeprecatedWarning(DeprecationWarning): + pass + + +def _deprecation_warning(item_name, warning_message): + warnings.simplefilter("always", PluginDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(item_name, warning_message), + category=PluginDeprecatedWarning, + stacklevel=4 + ) + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + _deprecation_warning(decorated_func.__name__, warning_message) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + +# Classes just inheriting from pyblish classes +# - seems to be unused in code (not 100% sure) +# - they should be removed but because it is not clear if they're used +# we'll keep then and log deprecation warning +# Deprecated since 3.14.* will be removed in 3.16.* class ContextPlugin(pyblish.api.ContextPlugin): - def process(cls, *args, **kwargs): - super(ContextPlugin, cls).process(cls, *args, **kwargs) + def __init__(self, *args, **kwargs): + _deprecation_warning( + "openpype.plugin.ContextPlugin", + " Please replace your usage with 'pyblish.api.ContextPlugin'." + ) + super(ContextPlugin, self).__init__(*args, **kwargs) +# Deprecated since 3.14.* will be removed in 3.16.* class InstancePlugin(pyblish.api.InstancePlugin): - def process(cls, *args, **kwargs): - super(InstancePlugin, cls).process(cls, *args, **kwargs) + def __init__(self, *args, **kwargs): + _deprecation_warning( + "openpype.plugin.ContextPlugin", + " Please replace your usage with 'pyblish.api.InstancePlugin'." + ) + super(InstancePlugin, self).__init__(*args, **kwargs) -class Extractor(InstancePlugin): +class Extractor(pyblish.api.InstancePlugin): """Extractor base class. 
The extractor base class implements a "staging_dir" function used to @@ -36,17 +103,13 @@ class Extractor(InstancePlugin): Upon calling this method the staging directory is stored inside the instance.data['stagingDir'] """ - staging_dir = instance.data.get('stagingDir', None) - if not staging_dir: - staging_dir = os.path.normpath( - tempfile.mkdtemp(prefix="pyblish_tmp_") - ) - instance.data['stagingDir'] = staging_dir + from openpype.pipeline.publish import get_instance_staging_dir - return staging_dir + return get_instance_staging_dir(instance) +@deprecated("openpype.pipeline.publish.context_plugin_should_run") def contextplugin_should_run(plugin, context): """Return whether the ContextPlugin should run on the given context. @@ -56,30 +119,10 @@ def contextplugin_should_run(plugin, context): This actually checks it correctly and returns whether it should run. + Deprecated: + Since 3.14.* will be removed in 3.16.* or later. """ - required = set(plugin.families) - # When no filter always run - if "*" in required: - return True + from openpype.pipeline.publish import context_plugin_should_run - for instance in context: - - # Ignore inactive instances - if (not instance.data.get("publish", True) or - not instance.data.get("active", True)): - continue - - families = instance.data.get("families", []) - if any(f in required for f in families): - return True - - family = instance.data.get("family") - if family and family in required: - return True - - return False - - -class ValidationException(Exception): - pass + return context_plugin_should_run(plugin, context) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 55fda55d17..ac931e41db 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,6 +1,6 @@ +from openpype.client import get_linked_representation_id from openpype.modules import ModulesManager from openpype.pipeline import load -from openpype.lib.avalon_context import get_linked_ids_for_representations from openpype.modules.sync_server.utils import SiteAlreadyPresentError @@ -45,9 +45,11 @@ class AddSyncSite(load.LoaderPlugin): force=True) if family == "workfile": - links = get_linked_ids_for_representations(project_name, - [repre_id], - link_type="reference") + links = get_linked_representation_id( + project_name, + repre_id=repre_id, + link_type="reference" + ) for link_repre_id in links: try: self.sync_server.add_site(project_name, link_repre_id, diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index 6e0b464cc1..b7ac015268 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -7,11 +7,15 @@ from pymongo import UpdateOne import qargparse from Qt import QtWidgets, QtCore -from openpype.client import get_versions, get_representations from openpype import style -from openpype.pipeline import load, AvalonMongoDB, Anatomy -from openpype.lib import StringTemplate +from openpype.client import get_versions, get_representations from openpype.modules import ModulesManager +from openpype.lib import format_file_size +from openpype.pipeline import load, AvalonMongoDB, Anatomy +from openpype.pipeline.load import ( + get_representation_path_with_anatomy, + InvalidRepresentationContext, +) class DeleteOldVersions(load.SubsetLoaderPlugin): @@ -38,13 +42,6 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): ) ] - def sizeof_fmt(self, num, suffix='B'): - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if 
abs(num) < 1024.0: - return "%3.1f%s%s" % (num, unit, suffix) - num /= 1024.0 - return "%.1f%s%s" % (num, 'Yi', suffix) - def delete_whole_dir_paths(self, dir_paths, delete=True): size = 0 @@ -80,27 +77,28 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): def path_from_representation(self, representation, anatomy): try: - template = representation["data"]["template"] - + context = representation["context"] except KeyError: return (None, None) + try: + path = get_representation_path_with_anatomy( + representation, anatomy + ) + except InvalidRepresentationContext: + return (None, None) + sequence_path = None - try: - context = representation["context"] - context["root"] = anatomy.roots - path = str(StringTemplate.format_template(template, context)) - if "frame" in context: - context["frame"] = self.sequence_splitter - sequence_path = os.path.normpath(str( - StringTemplate.format_template(template, context) - )) + if "frame" in context: + context["frame"] = self.sequence_splitter + sequence_path = get_representation_path_with_anatomy( + representation, anatomy + ) - except KeyError: - # Template references unavailable data - return (None, None) + if sequence_path: + sequence_path = sequence_path.normalized() - return (os.path.normpath(path), sequence_path) + return (path.normalized(), sequence_path) def delete_only_repre_files(self, dir_paths, file_paths, delete=True): size = 0 @@ -456,7 +454,7 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): size += self.main(project_name, data, remove_publish_folder) print("Progressing {}/{}".format(count + 1, len(contexts))) - msg = "Total size of files: " + self.sizeof_fmt(size) + msg = "Total size of files: {}".format(format_file_size(size)) self.log.info(msg) self.message(msg) diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index f6e1d4f06b..89c24f2402 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -7,15 +7,17 @@ from openpype.client import get_representations from openpype.pipeline import load, Anatomy from openpype import resources, style -from openpype.lib.dateutils import get_datetime_data -from openpype.lib.delivery import ( - sizeof_fmt, - path_from_representation, +from openpype.lib import ( + format_file_size, + collect_frames, + get_datetime_data, +) +from openpype.pipeline.load import get_representation_path_with_anatomy +from openpype.pipeline.delivery import ( get_format_dict, check_destination_path, - process_single_file, - process_sequence, - collect_frames + deliver_single_file, + deliver_sequence, ) @@ -167,7 +169,9 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): if repre["name"] not in selected_repres: continue - repre_path = path_from_representation(repre, self.anatomy) + repre_path = get_representation_path_with_anatomy( + repre, self.anatomy + ) anatomy_data = copy.deepcopy(repre["context"]) new_report_items = check_destination_path(str(repre["_id"]), @@ -202,7 +206,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): args[0] = src_path if frame: anatomy_data["frame"] = frame - new_report_items, uploaded = process_single_file(*args) + new_report_items, uploaded = deliver_single_file(*args) report_items.update(new_report_items) self._update_progress(uploaded) else: # fallback for Pype2 and representations without files @@ -211,9 +215,9 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): repre["context"]["frame"] = len(str(frame)) * "#" if not frame: - new_report_items, uploaded = process_single_file(*args) + new_report_items, uploaded = 
deliver_single_file(*args) else: - new_report_items, uploaded = process_sequence(*args) + new_report_items, uploaded = deliver_sequence(*args) report_items.update(new_report_items) self._update_progress(uploaded) @@ -263,8 +267,9 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): def _prepare_label(self): """Provides text with no of selected files and their size.""" - label = "{} files, size {}".format(self.files_selected, - sizeof_fmt(self.size_selected)) + label = "{} files, size {}".format( + self.files_selected, + format_file_size(self.size_selected)) return label def _get_selected_repres(self): diff --git a/openpype/plugins/load/open_file.py b/openpype/plugins/load/open_file.py index f21cd07c7f..00b2ecd7c5 100644 --- a/openpype/plugins/load/open_file.py +++ b/openpype/plugins/load/open_file.py @@ -15,8 +15,8 @@ def open(filepath): subprocess.call(('xdg-open', filepath)) -class Openfile(load.LoaderPlugin): - """Open Image Sequence with system default""" +class OpenFile(load.LoaderPlugin): + """Open Image Sequence or Video with system default""" families = ["render2d"] representations = ["*"] @@ -27,32 +27,10 @@ class Openfile(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - import clique - directory = os.path.dirname(self.fname) - pattern = clique.PATTERNS["frames"] + path = self.fname + if not os.path.exists(path): + raise RuntimeError("File not found: {}".format(path)) - files = os.listdir(directory) - representation = context["representation"] - - ext = representation["name"] - path = representation["data"]["path"] - - if ext in ["#"]: - collections, remainder = clique.assemble(files, - patterns=[pattern], - minimum_items=1) - - seqeunce = collections[0] - - first_image = list(seqeunce)[0] - filepath = os.path.normpath(os.path.join(directory, first_image)) - else: - file = [f for f in files - if ext in f - if "#" not in f][0] - filepath = os.path.normpath(os.path.join(directory, file)) - - self.log.info("Opening : {}".format(filepath)) - - open(filepath) + self.log.info("Opening : {}".format(path)) + open(path) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py new file mode 100644 index 0000000000..7d53b24e54 --- /dev/null +++ b/openpype/plugins/publish/collect_audio.py @@ -0,0 +1,105 @@ +import pyblish.api + +from openpype.client import ( + get_last_version_by_subset_name, + get_representations, +) +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) + + +class CollectAudio(pyblish.api.InstancePlugin): + """Collect asset's last published audio. + + The audio subset name searched for is defined in: + project settings > Collect Audio + """ + label = "Collect Asset Audio" + order = pyblish.api.CollectorOrder + 0.1 + families = ["review"] + hosts = [ + "nuke", + "maya", + "shell", + "hiero", + "premiere", + "harmony", + "traypublisher", + "standalonepublisher", + "fusion", + "tvpaint", + "resolve", + "webpublisher", + "aftereffects", + "flame", + "unreal" + ] + + audio_subset_name = "audioMain" + + def process(self, instance): + if instance.data.get("audio"): + self.log.info( + "Skipping Audio collection. It is already collected" + ) + return + + # Add audio to instance if exists.
+ self.log.info(( + "Searching for audio subset '{subset}'" + " in asset '{asset}'" + ).format( + subset=self.audio_subset_name, + asset=instance.data["asset"] + )) + + repre_doc = self._get_repre_doc(instance) + + # Add audio to instance if representation was found + if repre_doc: + instance.data["audio"] = [{ + "offset": 0, + "filename": get_representation_path(repre_doc) + }] + self.log.info("Audio Data added to instance ...") + + def _get_repre_doc(self, instance): + cache = instance.context.data.get("__cache_asset_audio") + if cache is None: + cache = {} + instance.context.data["__cache_asset_audio"] = cache + asset_name = instance.data["asset"] + + # first try to get it from cache + if asset_name in cache: + return cache[asset_name] + + project_name = legacy_io.active_project() + + # Find latest version document + last_version_doc = get_last_version_by_subset_name( + project_name, + self.audio_subset_name, + asset_name=asset_name, + fields=["_id"] + ) + + repre_doc = None + if last_version_doc: + # Try to find its representation (Expected there is only one) + repre_docs = list(get_representations( + project_name, version_ids=[last_version_doc["_id"]] + )) + if not repre_docs: + self.log.warning( + "Version document does not contain any representations" + ) + else: + repre_doc = repre_docs[0] + + # update cache + cache[asset_name] = repre_doc + + return repre_doc diff --git a/openpype/plugins/publish/collect_context_label.py b/openpype/plugins/publish/collect_context_label.py index 8cf71882aa..6cdeba8418 100644 --- a/openpype/plugins/publish/collect_context_label.py +++ b/openpype/plugins/publish/collect_context_label.py @@ -1,5 +1,6 @@ """ -Requires: +Optional: + context -> hostName (str) context -> currentFile (str) Provides: context -> label (str) @@ -16,16 +17,27 @@ class CollectContextLabel(pyblish.api.ContextPlugin): label = "Context Label" def process(self, context): + # Add ability to use custom context label + label = context.data.get("label") + if label: + self.log.debug("Context label is already set to \"{}\"".format( + label + )) + return - # Get last registered host - host = pyblish.api.registered_hosts()[-1] + host_name = context.data.get("hostName") + if not host_name: + host_name = pyblish.api.registered_hosts()[-1] + # Use host name as base for label + label = host_name.title() - # Get scene name from "currentFile" - path = context.data.get("currentFile") or "" - base = os.path.basename(path) + # Get scene name from "currentFile" and use basename as ending of label + path = context.data.get("currentFile") + if path: + label += " - {}".format(os.path.basename(path)) # Set label - label = "{host} - {scene}".format(host=host.title(), scene=base) - if host == "standalonepublisher": - label = host.title() context.data["label"] = label + self.log.debug("Context label is changed to \"{}\"".format( + label + )) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 9236c698ed..fc0f97b187 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -25,7 +25,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for created_instance in create_context.instances: instance_data = created_instance.data_to_store() if instance_data["active"]: - self.create_instance(context, instance_data) + self.create_instance( + context, instance_data, created_instance.transient_data + ) # Update global data to context
context.data.update(create_context.context_data_to_store()) @@ -37,7 +39,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): legacy_io.Session[key] = value os.environ[key] = value - def create_instance(self, context, in_data): + def create_instance(self, context, in_data, transient_data): subset = in_data["subset"] # If instance data already contain families then use it instance_families = in_data.get("families") or [] @@ -56,5 +58,8 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for key, value in in_data.items(): if key not in instance.data: instance.data[key] = value + + instance.data["transientData"] = transient_data + self.log.info("collected instance: {}".format(instance.data)) self.log.info("parsing data: {}".format(in_data)) diff --git a/openpype/plugins/publish/collect_input_representations_to_versions.py b/openpype/plugins/publish/collect_input_representations_to_versions.py new file mode 100644 index 0000000000..18a19bce80 --- /dev/null +++ b/openpype/plugins/publish/collect_input_representations_to_versions.py @@ -0,0 +1,47 @@ +import pyblish.api + +from bson.objectid import ObjectId + +from openpype.client import get_representations + + +class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): + """Converts collected input representations to input versions. + + Any data in `instance.data["inputRepresentations"]` gets converted into + `instance.data["inputVersions"]` as supported in OpenPype v3. + + """ + # This is a ContextPlugin because then we can query the database only once + # for the conversion of representation ids to version ids (optimization) + label = "Input Representations to Versions" + order = pyblish.api.CollectorOrder + 0.499 + hosts = ["*"] + + def process(self, context): + # Query all version ids for representation ids from the database once + representations = set() + for instance in context: + inst_repre = instance.data.get("inputRepresentations", []) + representations.update(inst_repre) + + representations_docs = get_representations( + project_name=context.data["projectEntity"]["name"], + representation_ids=representations, + fields=["_id", "parent"]) + + representation_id_to_version_id = { + repre["_id"]: repre["parent"] for repre in representations_docs + } + + for instance in context: + inst_repre = instance.data.get("inputRepresentations", []) + if not inst_repre: + continue + + input_versions = instance.data.get("inputVersions", []) + for repre_id in inst_repre: + repre_id = ObjectId(repre_id) + version_id = representation_id_to_version_id[repre_id] + input_versions.append(version_id) + instance.data["inputVersions"] = input_versions diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 40e89e29bc..9a68b6e43d 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -10,6 +10,7 @@ import opentimelineio as otio import pyblish.api from pprint import pformat from openpype.pipeline.editorial import ( + get_media_range_with_retimes, otio_range_to_frame_range, otio_range_with_handles ) @@ -29,6 +30,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): # get basic variables otio_clip = instance.data["otioClip"] workfile_start = instance.data["workfileFrameStart"] + workfile_source_duration = instance.data.get("shotDurationFromSource") # get ranges otio_tl_range = otio_clip.range_in_parent() @@ -54,6 +56,18 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): 
frame_end = frame_start + otio.opentime.to_frames( otio_tl_range.duration, otio_tl_range.duration.rate) - 1 + # in case the clip is retimed and the frame range should not be retimed + if workfile_source_duration: + # get available range trimmed with processed retimes + retimed_attributes = get_media_range_with_retimes( + otio_clip, 0, 0) + self.log.debug( + ">> retimed_attributes: {}".format(retimed_attributes)) + media_in = int(retimed_attributes["mediaIn"]) + media_out = int(retimed_attributes["mediaOut"]) + frame_end = frame_start + (media_out - media_in) + 1 + self.log.debug("frame_end: {}".format(frame_end)) + data = { "frameStart": frame_start, "frameEnd": frame_end, diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index 9c19f8a78e..3387cd1176 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -121,10 +121,8 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): otio.schema.ImageSequenceReference ): is_sequence = True - else: - # for OpenTimelineIO 0.12 and older - if metadata.get("padding"): - is_sequence = True + elif metadata.get("padding"): + is_sequence = True self.log.info( "frame_start-frame_end: {}-{}".format(frame_start, frame_end)) diff --git a/openpype/plugins/publish/collect_settings.py b/openpype/plugins/publish/collect_settings.py index d56eabd1b5..a418a6400c 100644 --- a/openpype/plugins/publish/collect_settings.py +++ b/openpype/plugins/publish/collect_settings.py @@ -1,5 +1,8 @@ from pyblish import api -from openpype.api import get_current_project_settings, get_system_settings +from openpype.settings import ( + get_current_project_settings, + get_system_settings, +) class CollectSettings(api.ContextPlugin): diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 88093fb92f..4179199317 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -8,10 +8,10 @@ import shutil import clique import six -import pyblish +import pyblish.api -import openpype -import openpype.api +from openpype import resources, PACKAGE_DIR +from openpype.pipeline import publish from openpype.lib import ( run_openpype_process, @@ -23,7 +23,7 @@ from openpype.lib import ( ) -class ExtractBurnin(openpype.api.Extractor): +class ExtractBurnin(publish.Extractor): """ Extractor to create video with pre-defined burnins from existing extracted video representation.
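The hunk above shows the pattern this PR applies across the publish plugins: `openpype.api.Extractor` becomes `openpype.pipeline.publish.Extractor`, and helpers move to direct imports from `openpype.lib` and `openpype.pipeline`. A minimal sketch of a plugin written against the new-style imports (the plugin class, label, and families are hypothetical; only the imports and the inherited `staging_dir` helper come from this diff):

    import pyblish.api

    from openpype.pipeline import publish


    class ExtractExample(publish.Extractor):
        """Hypothetical extractor using the new-style imports."""

        label = "Extract Example"
        order = pyblish.api.ExtractorOrder
        families = ["review"]

        def process(self, instance):
            # staging_dir() is inherited from the Extractor base class; as the
            # hunk at the top of this diff shows, it now defers to
            # openpype.pipeline.publish.get_instance_staging_dir().
            staging_dir = self.staging_dir(instance)
            self.log.info("Staging to {}".format(staging_dir))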
@@ -400,7 +400,7 @@ class ExtractBurnin(openpype.api.Extractor): # Use OpenPype default font if not font_filepath: - font_filepath = openpype.api.resources.get_liberation_font_path() + font_filepath = resources.get_liberation_font_path() burnin_options["font"] = font_filepath @@ -488,12 +488,6 @@ class ExtractBurnin(openpype.api.Extractor): "frame_end_handle": frame_end_handle } - # use explicit username for webpublishes as rewriting - # OPENPYPE_USERNAME might have side effects - webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME") - if webpublish_user_name: - burnin_data["username"] = webpublish_user_name - self.log.debug( "Basic burnin_data: {}".format(json.dumps(burnin_data, indent=4)) ) @@ -981,7 +975,7 @@ class ExtractBurnin(openpype.api.Extractor): """Return path to python script for burnin processing.""" scriptpath = os.path.normpath( os.path.join( - openpype.PACKAGE_DIR, + PACKAGE_DIR, "scripts", "otio_burnin.py" ) diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index 4d310ce109..1a6a82117d 100644 --- a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py @@ -1,10 +1,11 @@ import os import pyblish.api -import openpype.api import opentimelineio as otio +from openpype.pipeline import publish -class ExtractOTIOFile(openpype.api.Extractor): + +class ExtractOTIOFile(publish.Extractor): """ Extractor export OTIO file """ @@ -15,6 +16,8 @@ class ExtractOTIOFile(openpype.api.Extractor): hosts = ["resolve", "hiero", "traypublisher"] def process(self, instance): + if not instance.context.data.get("otioTimeline"): + return # create representation data if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/plugins/publish/extract_otio_review.py b/openpype/plugins/publish/extract_otio_review.py index 2ce5323468..169ff9e136 100644 --- a/openpype/plugins/publish/extract_otio_review.py +++ b/openpype/plugins/publish/extract_otio_review.py @@ -18,7 +18,12 @@ import os import clique import opentimelineio as otio from pyblish import api -import openpype + +from openpype.lib import ( + get_ffmpeg_tool_path, + run_subprocess, +) +from openpype.pipeline import publish from openpype.pipeline.editorial import ( otio_range_to_frame_range, trim_media_range, @@ -28,7 +33,7 @@ from openpype.pipeline.editorial import ( ) -class ExtractOTIOReview(openpype.api.Extractor): +class ExtractOTIOReview(publish.Extractor): """ Extract OTIO timeline into one concatenated image sequence file.
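The hunks below replace attribute access such as `openpype.lib.get_ffmpeg_tool_path(...)` and `openpype.api.run_subprocess(...)` with the direct imports added at the top of each file. A minimal sketch of that call pattern under the same imports; the input and output paths are hypothetical:

    from openpype.lib import (
        get_ffmpeg_tool_path,
        run_subprocess,
    )

    # Resolve the ffmpeg binary OpenPype is configured to use.
    ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")

    command = [
        ffmpeg_path,
        "-y",               # overwrite output without prompting
        "-i", "input.mov",  # hypothetical source clip
        "output.%04d.png",  # hypothetical image-sequence destination
    ]

    # run_subprocess() accepts an optional logger (logger=self.log in the
    # plugins below) and raises when the command exits with a non-zero code.
    output = run_subprocess(command)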
@@ -334,7 +339,7 @@ class ExtractOTIOReview(openpype.api.Extractor): otio.time.TimeRange: trimmed available range """ # get rendering app path - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # create path and frame start to destination output_path, out_frame_start = self._get_ffmpeg_output() @@ -397,7 +402,7 @@ class ExtractOTIOReview(openpype.api.Extractor): ]) # execute self.log.debug("Executing: {}".format(" ".join(command))) - output = openpype.api.run_subprocess( + output = run_subprocess( command, logger=self.log ) self.log.debug("Output: {}".format(output)) diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py index 19625fa568..70726338aa 100644 --- a/openpype/plugins/publish/extract_otio_trimming_video.py +++ b/openpype/plugins/publish/extract_otio_trimming_video.py @@ -6,18 +6,24 @@ Requires: """ import os -from pyblish import api -import openpype from copy import deepcopy + +import pyblish.api + +from openpype.lib import ( + get_ffmpeg_tool_path, + run_subprocess, +) +from openpype.pipeline import publish from openpype.pipeline.editorial import frames_to_seconds -class ExtractOTIOTrimmingVideo(openpype.api.Extractor): +class ExtractOTIOTrimmingVideo(publish.Extractor): """ Trimming video file longer than required length """ - order = api.ExtractorOrder + order = pyblish.api.ExtractorOrder label = "Extract OTIO trim longer video" families = ["trim"] hosts = ["resolve", "hiero", "flame"] @@ -70,7 +76,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): """ # get rendering app path - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # create path to destination output_path = self._get_ffmpeg_output(input_file_path) @@ -96,7 +102,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): # execute self.log.debug("Executing: {}".format(" ".join(command))) - output = openpype.api.run_subprocess( + output = run_subprocess( command, logger=self.log ) self.log.debug("Output: {}".format(output)) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 69043ee261..fca3d96ca6 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -1,19 +1,22 @@ import os -from pprint import pformat import re -import openpype.api -import pyblish +from pprint import pformat + +import pyblish.api + from openpype.lib import ( path_to_subprocess_arg, + run_subprocess, get_ffmpeg_tool_path, get_ffprobe_data, get_ffprobe_streams, get_ffmpeg_codec_args, get_ffmpeg_format_args, ) +from openpype.pipeline import publish -class ExtractReviewSlate(openpype.api.Extractor): +class ExtractReviewSlate(publish.Extractor): """ Will add slate frame at the start of the video files """ @@ -158,7 +161,7 @@ class ExtractReviewSlate(openpype.api.Extractor): input_args.extend([ "-loop", "1", - "-i", openpype.lib.path_to_subprocess_arg(slate_path), + "-i", path_to_subprocess_arg(slate_path), "-r", str(input_frame_rate), "-frames:v", "1", ]) @@ -267,7 +270,7 @@ class ExtractReviewSlate(openpype.api.Extractor): self.log.debug( "Slate Executing: {}".format(slate_subprocess_cmd) ) - openpype.api.run_subprocess( + run_subprocess( slate_subprocess_cmd, shell=True, logger=self.log ) @@ -348,7 +351,7 @@ class ExtractReviewSlate(openpype.api.Extractor): "Executing concat filter: {}".format (" ".join(concat_args)) ) -
openpype.api.run_subprocess( + run_subprocess( concat_args, logger=self.log ) @@ -533,7 +536,7 @@ class ExtractReviewSlate(openpype.api.Extractor): self.log.debug("Silent Slate Executing: {}".format( " ".join(slate_silent_args) )) - openpype.api.run_subprocess( + run_subprocess( slate_silent_args, logger=self.log ) diff --git a/openpype/plugins/publish/extract_trim_video_audio.py b/openpype/plugins/publish/extract_trim_video_audio.py index 06817c4b5a..b951136391 100644 --- a/openpype/plugins/publish/extract_trim_video_audio.py +++ b/openpype/plugins/publish/extract_trim_video_audio.py @@ -1,14 +1,16 @@ import os +from pprint import pformat + import pyblish.api -import openpype.api from openpype.lib import ( get_ffmpeg_tool_path, + run_subprocess, ) -from pprint import pformat +from openpype.pipeline import publish -class ExtractTrimVideoAudio(openpype.api.Extractor): +class ExtractTrimVideoAudio(publish.Extractor): """Trim with ffmpeg "mov" and "wav" files.""" # must be before `ExtractThumbnailSP` @@ -98,7 +100,7 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): joined_args = " ".join(ffmpeg_args) self.log.info(f"Processing: {joined_args}") - openpype.api.run_subprocess( + run_subprocess( ffmpeg_args, logger=self.log ) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f99c718f8a..0998e643e6 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -5,6 +5,9 @@ import copy import clique import six +from bson.objectid import ObjectId +import pyblish.api + from openpype.client.operations import ( OperationsSession, new_subset_document, @@ -14,8 +17,6 @@ from openpype.client.operations import ( prepare_version_update_data, prepare_representation_update_data, ) -from bson.objectid import ObjectId -import pyblish.api from openpype.client import ( get_representations, @@ -23,10 +24,12 @@ from openpype.client import ( get_version_by_name, ) from openpype.lib import source_hash -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io -from openpype.pipeline.publish import KnownPublishError +from openpype.pipeline.publish import ( + KnownPublishError, + get_publish_template_name, +) log = logging.getLogger(__name__) @@ -135,7 +138,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # the database even if not used by the destination template db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "username", "output" + "family", "hierarchy", "username", "user", "output" ] skip_host_families = [] @@ -415,6 +418,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): subset_group = instance.data.get("subsetGroup") if subset_group: data["subsetGroup"] = subset_group + elif existing_subset_doc: + # Preserve previous subset group if new version does not set it + if "subsetGroup" in existing_subset_doc.get("data", {}): + subset_group = existing_subset_doc["data"]["subsetGroup"] + data["subsetGroup"] = subset_group subset_id = None if existing_subset_doc: @@ -792,52 +800,26 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def get_template_name(self, instance): """Return anatomy template name to use for integration""" - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - template_name_profiles = self._get_template_name_profiles(instance) - profile = filter_profiles( - 
template_name_profiles, - filter_criteria, - logger=self.log - ) - - if profile: - return profile["template_name"] - return self.default_template_name - - def _get_template_name_profiles(self, instance): - """Receive profiles for publish template keys. - - Reuse template name profiles from legacy integrator. Goal is to move - the profile settings out of plugin settings but until that happens we - want to be able set it at one place and don't break backwards - compatibility (more then once). - """ - - return ( - instance.context.data["project_settings"] - ["global"] - ["publish"] - ["IntegrateAssetNew"] - ["template_name_profiles"] - ) - - def get_profile_filter_criteria(self, instance): - """Return filter criteria for `filter_profiles`""" # Anatomy data is pre-filled by Collectors - anatomy_data = instance.data["anatomyData"] + + project_name = legacy_io.active_project() # Task can be optional in anatomy data - task = anatomy_data.get("task", {}) + host_name = instance.context.data["hostName"] + anatomy_data = instance.data["anatomyData"] + family = anatomy_data["family"] + task_info = anatomy_data.get("task") or {} - # Return filter criteria - return { - "families": anatomy_data["family"], - "tasks": task.get("name"), - "task_types": task.get("type"), - "hosts": instance.context.data["hostName"], - } + return get_publish_template_name( + project_name, + host_name, + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], + logger=self.log + ) def get_rootless_path(self, anatomy, path): """Returns, if possible, path without absolute portion from root diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 7d698ff98d..5f4d284740 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -4,8 +4,6 @@ import clique import errno import shutil -from bson.objectid import ObjectId -from pymongo import InsertOne, ReplaceOne import pyblish.api from openpype.client import ( @@ -14,14 +12,17 @@ from openpype.client import ( get_archived_representations, get_representations, ) -from openpype.lib import ( - create_hard_link, - filter_profiles +from openpype.client.operations import ( + OperationsSession, + new_hero_version_doc, + prepare_hero_version_update_data, + prepare_representation_update_data, ) +from openpype.lib import create_hard_link from openpype.pipeline import ( - schema, - legacy_io, + schema ) +from openpype.pipeline.publish import get_publish_template_name class IntegrateHeroVersion(pyblish.api.InstancePlugin): @@ -46,7 +47,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ignored_representation_names = [] db_representation_context_keys = [ "project", "asset", "task", "subset", "representation", - "family", "hierarchy", "task", "username" + "family", "hierarchy", "task", "username", "user" ] # QUESTION/TODO this process should happen on server if crashed due to # permissions error on files (files were used or user didn't have perms) @@ -68,10 +69,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) return - template_key = self._get_template_key(instance) - anatomy = instance.context.data["anatomy"] project_name = anatomy.project_name + + template_key = self._get_template_key(project_name, instance) + if template_key not in anatomy.templates: self.log.warning(( "!!! 
Anatomy of project \"{}\" does not have set" @@ -188,35 +190,32 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): repre["name"].lower(): repre for repre in old_repres } + op_session = OperationsSession() + + entity_id = None if old_version: - new_version_id = old_version["_id"] - else: - new_version_id = ObjectId() - - new_hero_version = { - "_id": new_version_id, - "version_id": src_version_entity["_id"], - "parent": src_version_entity["parent"], - "type": "hero_version", - "schema": "openpype:hero_version-1.0" - } - schema.validate(new_hero_version) - - # Don't make changes in database until everything is O.K. - bulk_writes = [] + entity_id = old_version["_id"] + new_hero_version = new_hero_version_doc( + src_version_entity["_id"], + src_version_entity["parent"], + entity_id=entity_id + ) if old_version: self.log.debug("Replacing old hero version.") - bulk_writes.append( - ReplaceOne( - {"_id": new_hero_version["_id"]}, - new_hero_version - ) + update_data = prepare_hero_version_update_data( + old_version, new_hero_version + ) + op_session.update_entity( + project_name, + new_hero_version["type"], + old_version["_id"], + update_data ) else: self.log.debug("Creating first hero version.") - bulk_writes.append( - InsertOne(new_hero_version) + op_session.create_entity( + project_name, new_hero_version["type"], new_hero_version ) # Separate old representations into `to replace` and `to delete` @@ -236,7 +235,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repres = list(get_archived_representations( project_name, # Check what is type of archived representation - version_ids=[new_version_id] + version_ids=[new_hero_version["_id"]] )) archived_repres_by_name = {} for repre in archived_repres: @@ -383,12 +382,15 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Replace current representation if repre_name_low in old_repres_to_replace: old_repre = old_repres_to_replace.pop(repre_name_low) + repre["_id"] = old_repre["_id"] - bulk_writes.append( - ReplaceOne( - {"_id": old_repre["_id"]}, - repre - ) + update_data = prepare_representation_update_data( + old_repre, repre) + op_session.update_entity( + project_name, + old_repre["type"], + old_repre["_id"], + update_data ) # Unarchive representation @@ -396,21 +398,21 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repre = archived_repres_by_name.pop( repre_name_low ) - old_id = archived_repre["old_id"] - repre["_id"] = old_id - bulk_writes.append( - ReplaceOne( - {"old_id": old_id}, - repre - ) + repre["_id"] = archived_repre["old_id"] + update_data = prepare_representation_update_data( + archived_repre, repre) + op_session.update_entity( + project_name, + old_repre["type"], + archived_repre["_id"], + update_data ) # Create representation else: - repre["_id"] = ObjectId() - bulk_writes.append( - InsertOne(repre) - ) + repre.pop("_id", None) + op_session.create_entity(project_name, "representation", + repre) self.path_checks = [] @@ -431,28 +433,22 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repre = archived_repres_by_name.pop( repre_name_low ) - repre["old_id"] = repre["_id"] - repre["_id"] = archived_repre["_id"] - repre["type"] = archived_repre["type"] - bulk_writes.append( - ReplaceOne( - {"_id": archived_repre["_id"]}, - repre - ) - ) + changes = {"old_id": repre["_id"], + "_id": archived_repre["_id"], + "type": archived_repre["type"]} + op_session.update_entity(project_name, + archived_repre["type"], + archived_repre["_id"], + changes) else: - repre["old_id"] = 
repre["_id"] - repre["_id"] = ObjectId() + repre["old_id"] = repre.pop("_id") repre["type"] = "archived_representation" - bulk_writes.append( - InsertOne(repre) - ) + op_session.create_entity(project_name, + "archived_representation", + repre) - if bulk_writes: - legacy_io.database[project_name].bulk_write( - bulk_writes - ) + op_session.commit() # Remove backuped previous hero if ( @@ -527,30 +523,24 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): return publish_folder - def _get_template_key(self, instance): + def _get_template_key(self, project_name, instance): anatomy_data = instance.data["anatomyData"] - task_data = anatomy_data.get("task") or {} - task_name = task_data.get("name") - task_type = task_data.get("type") + task_info = anatomy_data.get("task") or {} host_name = instance.context.data["hostName"] + # TODO raise error if Hero not set? family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "task_names": task_name, - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + + return get_publish_template_name( + project_name, + host_name, + family, + task_info.get("name"), + task_info.get("type"), + project_settings=instance.context.data["project_settings"], + hero=True, logger=self.log ) - if profile: - template_name = profile["template_name"] - else: - template_name = self._default_template_name - return template_name def main_family_from_instance(self, instance): """Returns main family of entered instance.""" @@ -584,8 +574,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): return except OSError as exc: - # re-raise exception if different than cross drive path - if exc.errno != errno.EXDEV: + # re-raise exception if different than + # EXDEV - cross drive path + # EINVAL - wrong format, must be NTFS + self.log.debug("Hardlink failed with errno:'{}'".format(exc.errno)) + if exc.errno not in [errno.EXDEV, errno.EINVAL]: raise shutil.copy(src_path, dst_path) diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index b90b61f587..536ab83f2c 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -15,7 +15,6 @@ from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api -import openpype.api from openpype.client import ( get_asset_by_name, get_subset_by_id, @@ -25,14 +24,17 @@ from openpype.client import ( get_representations, get_archived_representations, ) -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, create_hard_link, StringTemplate, - TemplateUnsolved + TemplateUnsolved, + source_hash, + filter_profiles, + get_local_site_id, ) from openpype.pipeline import legacy_io +from openpype.pipeline.publish import get_publish_template_name # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -127,7 +129,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): exclude_families = ["render.farm"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "task", "username" + "family", "hierarchy", "task", "username", "user" ] default_template_name = "publish" @@ -138,7 +140,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): integrated_file_sizes = {} # Attributes set by settings - template_name_profiles = None subset_grouping_profiles = None def process(self, 
instance): @@ -388,22 +389,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "tasks": task_name, - "hosts": instance.context.data["hostName"], - "task_types": task_type - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + template_name = get_publish_template_name( + project_name, + instance.context.data["hostName"], + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], logger=self.log ) - template_name = "publish" - if profile: - template_name = profile["template_name"] - published_representations = {} for idx, repre in enumerate(repres): published_files = [] @@ -1058,7 +1053,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for _src, dest in resources: path = self.get_rootless_path(anatomy, dest) dest = self.get_dest_temp_url(dest) - file_hash = openpype.api.source_hash(dest) + file_hash = source_hash(dest) if self.TMP_FILE_EXT and \ ',{}'.format(self.TMP_FILE_EXT) in file_hash: file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), @@ -1168,7 +1163,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def _get_sites(self, sync_project_presets): """Returns tuple (local_site, remote_site)""" - local_site_id = openpype.api.get_local_site_id() + local_site_id = get_local_site_id() local_site = sync_project_presets["config"]. \ get("active_site", "studio").strip() diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 8ae0dd2d60..e7046ba2ea 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -1,3 +1,13 @@ +""" Integrate Thumbnails for OpenPype use in Loaders. + + This thumbnail is different from the 'thumbnail' representation, which + could be uploaded to Ftrack, or used as any other representation in + Loaders to pull into a scene. + + This one is used only as an image describing the content of a published + item and shows up only in the Loader's right column section. +""" + import os import sys import errno @@ -6,14 +16,13 @@ import copy import six import pyblish.api -from bson.objectid import ObjectId from openpype.client import get_version_by_id -from openpype.pipeline import legacy_io +from openpype.client.operations import OperationsSession, new_thumbnail_doc class IntegrateThumbnails(pyblish.api.InstancePlugin): - """Integrate Thumbnails.""" + """Integrate Thumbnails for OpenPype use in Loaders.""" label = "Integrate Thumbnails" order = pyblish.api.IntegratorOrder + 0.01 @@ -24,13 +33,9 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ] def process(self, instance): - - if not os.environ.get("AVALON_THUMBNAIL_ROOT"): - self.log.warning( - "AVALON_THUMBNAIL_ROOT is not set." - " Skipping thumbnail integration." - ) - return + env_key = "AVALON_THUMBNAIL_ROOT" + thumbnail_root_format_key = "{thumbnail_root}" + thumbnail_root = os.environ.get(env_key) or "" published_repres = instance.data.get("published_representations") if not published_repres: @@ -51,6 +56,16 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ).format(project_name)) return + thumbnail_template = anatomy.templates["publish"]["thumbnail"] + if ( + not thumbnail_root + and thumbnail_root_format_key in thumbnail_template + ): + self.log.warning(( + "{} is not set. Skipping thumbnail integration."
+ ).format(env_key)) + return + thumb_repre = None thumb_repre_anatomy_data = None for repre_info in published_repres.values(): @@ -66,10 +81,6 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - legacy_io.install() - - thumbnail_template = anatomy.templates["publish"]["thumbnail"] - version = get_version_by_id(project_name, thumb_repre["parent"]) if not version: raise AssertionError( @@ -88,14 +99,15 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): filename, file_extension = os.path.splitext(src_full_path) # Create id for mongo entity now to fill anatomy template - thumbnail_id = ObjectId() + thumbnail_doc = new_thumbnail_doc() + thumbnail_id = thumbnail_doc["_id"] # Prepare anatomy template fill data template_data = copy.deepcopy(thumb_repre_anatomy_data) template_data.update({ "_id": str(thumbnail_id), - "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), "ext": file_extension[1:], + "thumbnail_root": thumbnail_root, "thumbnail_type": "thumbnail" }) @@ -117,8 +129,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): shutil.copy(src_full_path, dst_full_path) # Clean template data from keys that are dynamic - template_data.pop("_id") - template_data.pop("thumbnail_root") + for key in ("_id", "thumbnail_root"): + template_data.pop(key, None) repre_context = template_filled.used_values for key in self.required_context_keys: @@ -127,34 +139,40 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): continue repre_context[key] = template_data[key] - thumbnail_entity = { - "_id": thumbnail_id, - "type": "thumbnail", - "schema": "openpype:thumbnail-1.0", - "data": { - "template": thumbnail_template, - "template_data": repre_context - } + op_session = OperationsSession() + + thumbnail_doc["data"] = { + "template": thumbnail_template, + "template_data": repre_context } - # Create thumbnail entity - legacy_io.insert_one(thumbnail_entity) - self.log.debug( - "Creating entity in database {}".format(str(thumbnail_entity)) + op_session.create_entity( + project_name, thumbnail_doc["type"], thumbnail_doc ) + # Create thumbnail entity + self.log.debug( + "Creating entity in database {}".format(str(thumbnail_doc)) + ) + # Set thumbnail id for version - legacy_io.update_many( - {"_id": version["_id"]}, - {"$set": {"data.thumbnail_id": thumbnail_id}} + op_session.update_entity( + project_name, + version["type"], + version["_id"], + {"data.thumbnail_id": thumbnail_id} ) self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( version["name"], str(version["_id"]) )) asset_entity = instance.data["assetEntity"] - legacy_io.update_many( - {"_id": asset_entity["_id"]}, - {"$set": {"data.thumbnail_id": thumbnail_id}} + op_session.update_entity( + project_name, + asset_entity["type"], + asset_entity["_id"], + {"data.thumbnail_id": thumbnail_id} ) self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format( asset_entity["name"], str(version["_id"]) )) + + op_session.commit() diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py new file mode 100644 index 0000000000..f9e23223e6 --- /dev/null +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -0,0 +1,72 @@ +""" Marks thumbnail representation for integration to DB or not. + + Some hosts produce a thumbnail representation; most of them do not create + it explicitly, but it is created during the extract phase.
+ + In some cases it might be useful to override the implicit setting per host/task. + + This plugin needs to run after the extract phase, but before integrate.py, + as the thumbnail is part of the review family and is integrated there. + + It is better to control thumbnail integration in one place than to + configure it in multiple places across host implementations. +""" +import pyblish.api + +from openpype.lib.profiles_filtering import filter_profiles + + +class PreIntegrateThumbnails(pyblish.api.InstancePlugin): + """Marks thumbnail representation for integration to DB or not.""" + + label = "Override Integrate Thumbnail Representations" + order = pyblish.api.IntegratorOrder - 0.1 + families = ["review"] + + integrate_profiles = {} + + def process(self, instance): + repres = instance.data.get("representations") + if not repres: + return + + thumbnail_repre = None + for repre in repres: + if repre["name"] == "thumbnail": + thumbnail_repre = repre + break + + if not thumbnail_repre: + return + + family = instance.data["family"] + subset_name = instance.data["subset"] + host_name = instance.context.data["hostName"] + + anatomy_data = instance.data["anatomyData"] + task = anatomy_data.get("task", {}) + + found_profile = filter_profiles( + self.integrate_profiles, + { + "hosts": host_name, + "task_names": task.get("name"), + "task_types": task.get("type"), + "families": family, + "subsets": subset_name, + }, + logger=self.log + ) + + if not found_profile: + return + + if not found_profile["integrate_thumbnail"]: + if "delete" not in thumbnail_repre["tags"]: + thumbnail_repre["tags"].append("delete") + else: + if "delete" in thumbnail_repre["tags"]: + thumbnail_repre["tags"].remove("delete") + + self.log.debug( + "Thumbnail repre tags {}".format(thumbnail_repre["tags"])) diff --git a/openpype/plugins/publish/validate_resources.py b/openpype/plugins/publish/validate_resources.py index 644977ecd4..7911c70c2d 100644 --- a/openpype/plugins/publish/validate_resources.py +++ b/openpype/plugins/publish/validate_resources.py @@ -1,7 +1,6 @@ -import pyblish.api -import openpype.api - import os +import pyblish.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateResources(pyblish.api.InstancePlugin): @@ -17,7 +16,7 @@ class ValidateResources(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Resources" def process(self, instance): diff --git a/openpype/plugins/publish/validate_version.py b/openpype/plugins/publish/validate_version.py index b94152ef2d..b91633430f 100644 --- a/openpype/plugins/publish/validate_version.py +++ b/openpype/plugins/publish/validate_version.py @@ -10,7 +10,8 @@ class ValidateVersion(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder label = "Validate Version" - hosts = ["nuke", "maya", "houdini", "blender", "standalonepublisher"] + hosts = ["nuke", "maya", "houdini", "blender", "standalonepublisher", + "photoshop", "aftereffects"] optional = False active = True diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 66bf5e9bb4..d08a812c61 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -4,20 +4,7 @@ import os import sys import json import time - -from openpype.lib import PypeLogger -from openpype.api import get_app_environments_for_context -from openpype.lib.plugin_tools import get_batch_asset_task_info -from openpype.lib.remote_publish import ( - get_webpublish_conn, - start_webpublish_log, - publish_and_log, - fail_batch, - find_variant_key, -
get_task_data, - get_timeout, - IN_PROGRESS_STATUS -) +import signal class PypeCommands: @@ -27,10 +14,11 @@ class PypeCommands: """ @staticmethod def launch_tray(): - PypeLogger.set_process_name("Tray") - + from openpype.lib import Logger from openpype.tools import tray + Logger.set_process_name("Tray") + tray.main() @staticmethod @@ -47,10 +35,12 @@ class PypeCommands: @staticmethod def add_modules(click_func): """Modules/Addons can add their cli commands dynamically.""" + + from openpype.lib import Logger from openpype.modules import ModulesManager manager = ModulesManager() - log = PypeLogger.get_logger("AddModulesCLI") + log = Logger.get_logger("CLI-AddModules") for module in manager.modules: try: module.cli(click_func) @@ -72,8 +62,8 @@ class PypeCommands: @staticmethod def launch_webpublisher_webservercli(*args, **kwargs): - from openpype.hosts.webpublisher.webserver_service.webserver_cli \ - import (run_webserver) + from openpype.hosts.webpublisher.webserver_service import run_webserver + return run_webserver(*args, **kwargs) @staticmethod @@ -96,10 +86,11 @@ class PypeCommands: Raises: RuntimeError: When there is no path to process. """ + + from openpype.lib import Logger + from openpype.lib.applications import get_app_environments_for_context from openpype.modules import ModulesManager from openpype.pipeline import install_openpype_plugins - - from openpype.api import Logger from openpype.tools.utils.host_tools import show_publish from openpype.tools.utils.lib import qt_app_context @@ -107,7 +98,7 @@ class PypeCommands: import pyblish.api import pyblish.util - log = Logger.get_logger() + log = Logger.get_logger("CLI-publish") install_openpype_plugins() @@ -195,92 +186,14 @@ class PypeCommands: targets (list): Pyblish targets (to choose validator for example) """ - import pyblish.api - from openpype.api import Logger - from openpype.lib import ApplicationManager - log = Logger.get_logger() - - log.info("remotepublishphotoshop command") - - task_data = get_task_data(batch_path) - - workfile_path = os.path.join(batch_path, - task_data["task"], - task_data["files"][0]) - - print("workfile_path {}".format(workfile_path)) - - batch_id = task_data["batch"] - dbcon = get_webpublish_conn() - # safer to start logging here, launch might be broken altogether - _id = start_webpublish_log(dbcon, batch_id, user_email) - - batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) - if len(batches_in_progress) > 1: - running_batches = [str(batch["_id"]) - for batch in batches_in_progress - if batch["_id"] != _id] - msg = "There are still running batches {}\n". 
\ - format("\n".join(running_batches)) - msg += "Ask admin to check them and reprocess current batch" - fail_batch(_id, dbcon, msg) - print("Another batch running, probably stuck, ask admin for help") - - asset_name, task_name, task_type = get_batch_asset_task_info( - task_data["context"]) - - application_manager = ApplicationManager() - found_variant_key = find_variant_key(application_manager, host_name) - app_name = "{}/{}".format(host_name, found_variant_key) - - # must have for proper launch of app - env = get_app_environments_for_context( - project_name, - asset_name, - task_name, - app_name + from openpype.hosts.webpublisher.publish_functions import ( + cli_publish_from_app ) - print("env:: {}".format(env)) - os.environ.update(env) - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - # must pass identifier to update log lines for a batch - os.environ["BATCH_LOG_ID"] = str(_id) - os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - os.environ["USER_EMAIL"] = user_email - - pyblish.api.register_host(host_name) - if targets: - if isinstance(targets, str): - targets = [targets] - current_targets = os.environ.get("PYBLISH_TARGETS", "").split( - os.pathsep) - for target in targets: - current_targets.append(target) - - os.environ["PYBLISH_TARGETS"] = os.pathsep.join( - set(current_targets)) - - data = { - "last_workfile_path": workfile_path, - "start_last_workfile": True, - "project_name": project_name, - "asset_name": asset_name, - "task_name": task_name - } - - launched_app = application_manager.launch(app_name, **data) - - timeout = get_timeout(project_name, host_name, task_type) - - time_start = time.time() - while launched_app.poll() is None: - time.sleep(0.5) - if time.time() - time_start > timeout: - launched_app.terminate() - msg = "Timeout reached" - fail_batch(_id, dbcon, msg) + cli_publish_from_app( + project_name, batch_path, host_name, user_email, targets + ) @staticmethod def remotepublish(project, batch_path, user_email, targets=None): @@ -304,46 +217,12 @@ class PypeCommands: Raises: RuntimeError: When there is no path to process. """ - if not batch_path: - raise RuntimeError("No publish paths specified") - # Register target and host - import pyblish.api - import pyblish.util + from openpype.hosts.webpublisher.publish_functions import ( + cli_publish + ) - from openpype.pipeline import install_host - from openpype.hosts.webpublisher import api as webpublisher - - log = PypeLogger.get_logger() - - log.info("remotepublish command") - - host_name = "webpublisher" - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = host_name - os.environ["USER_EMAIL"] = user_email - os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - - pyblish.api.register_host(host_name) - - if targets: - if isinstance(targets, str): - targets = [targets] - for target in targets: - pyblish.api.register_target(target) - - install_host(webpublisher) - - log.info("Running publish ...") - - _, batch_id = os.path.split(batch_path) - dbcon = get_webpublish_conn() - _id = start_webpublish_log(dbcon, batch_id, user_email) - - publish_and_log(dbcon, _id, log, batch_id=batch_id) - - log.info("Publish finished.") + cli_publish(project, batch_path, user_email, targets) @staticmethod def extractenvironments(output_json_path, project, asset, task, app, @@ -352,8 +231,10 @@ class PypeCommands: Called by Deadline plugin to propagate environment into render jobs. 
""" + + from openpype.lib.applications import get_app_environments_for_context + if all((project, asset, task, app)): - from openpype.api import get_app_environments_for_context env = get_app_environments_for_context( project, asset, task, app, env_group ) @@ -435,8 +316,12 @@ class PypeCommands: pytest.main(args) def syncserver(self, active_site): - """Start running sync_server in background.""" - import signal + """Start running sync_server in background. + + This functionality is available in directly in module cli commands. + `~/openpype_console module sync_server syncservice` + """ + os.environ["OPENPYPE_LOCAL_ID"] = active_site def signal_handler(sig, frame): @@ -455,7 +340,6 @@ class PypeCommands: sync_server_module.server_init() sync_server_module.server_start() - import time while True: time.sleep(1.0) diff --git a/openpype/scripts/remote_publish.py b/openpype/scripts/remote_publish.py index d322f369d1..37df35e36c 100644 --- a/openpype/scripts/remote_publish.py +++ b/openpype/scripts/remote_publish.py @@ -1,11 +1,12 @@ try: - from openpype.api import Logger - import openpype.lib.remote_publish + from openpype.lib import Logger + from openpype.pipeline.publish.lib import remote_publish except ImportError as exc: # Ensure Deadline fails by output an error that contains "Fatal Error:" raise ImportError("Fatal Error: %s" % exc) + if __name__ == "__main__": # Perform remote publish with thorough error checking log = Logger.get_logger(__name__) - openpype.lib.remote_publish.publish(log, raise_error=True) + remote_publish(log, raise_error=True) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index caf399a903..3415c4451f 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -29,7 +29,7 @@ "delivery": {}, "unreal": { "folder": "{root[work]}/{project[name]}/unreal/{task[name]}", - "file": "{project[code]}_{asset}", + "file": "{project[code]}_{asset}.{ext}", "path": "{@folder}/{@file}" }, "others": { diff --git a/openpype/settings/defaults/project_settings/blender.json b/openpype/settings/defaults/project_settings/blender.json index a7262dcb5d..7acecfaae0 100644 --- a/openpype/settings/defaults/project_settings/blender.json +++ b/openpype/settings/defaults/project_settings/blender.json @@ -2,5 +2,69 @@ "workfile_builder": { "create_first_version": false, "custom_templates": [] + }, + "publish": { + "ValidateCameraZeroKeyframe": { + "enabled": true, + "optional": true, + "active": true + }, + "ValidateMeshHasUvs": { + "enabled": true, + "optional": true, + "active": true + }, + "ValidateMeshNoNegativeScale": { + "enabled": true, + "optional": false, + "active": true + }, + "ValidateTransformZero": { + "enabled": true, + "optional": false, + "active": true + }, + "ExtractBlend": { + "enabled": true, + "optional": true, + "active": true, + "families": [ + "model", + "camera", + "rig", + "action", + "layout" + ] + }, + "ExtractFBX": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractABC": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractBlendAnimation": { + "enabled": true, + "optional": true, + "active": true + }, + "ExtractAnimationFBX": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractCamera": { + "enabled": true, + "optional": true, + "active": true + }, + "ExtractLayout": { + "enabled": true, + "optional": true, + "active": false + } } } \ No newline at 
end of file diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index bfdc58d9ee..0f3080ad64 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -1,4 +1,23 @@ { + "imageio": { + "project": { + "colourPolicy": "ACES 1.1", + "frameDepth": "16-bit fp", + "fieldDominance": "PROGRESSIVE" + }, + "profilesMapping": { + "inputs": [ + { + "flameName": "ACEScg", + "ocioName": "ACES - ACEScg" + }, + { + "flameName": "Rec.709 video", + "ocioName": "Output - Rec.709" + } + ] + } + }, "create": { "CreateShotClip": { "hierarchy": "{folder}/{sequence}", @@ -17,7 +36,9 @@ "workfileFrameStart": 1001, "handleStart": 5, "handleEnd": 5, - "includeHandles": false + "includeHandles": false, + "retimedHandles": true, + "retimedFramerange": true } }, "publish": { diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 2d5f889aa5..cdf861df4a 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -56,7 +56,7 @@ "Not Ready" ], "__ignore__": [ - "in prgoress", + "in progress", "omitted", "on hold" ] @@ -96,10 +96,6 @@ "mapping": {}, "asset_types_to_skip": [] }, - "first_version_status": { - "enabled": true, - "status": "" - }, "next_task_update": { "enabled": true, "mapping": { @@ -459,7 +455,7 @@ "family_mapping": { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", diff --git a/openpype/settings/defaults/project_settings/fusion.json b/openpype/settings/defaults/project_settings/fusion.json new file mode 100644 index 0000000000..1b4c4c55b5 --- /dev/null +++ b/openpype/settings/defaults/project_settings/fusion.json @@ -0,0 +1,12 @@ +{ + "imageio": { + "ocio": { + "enabled": false, + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + } + } + } +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 0ff9363ba7..1b7dc7a41a 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -3,6 +3,10 @@ "CollectAnatomyInstanceData": { "follow_workfile_version": false }, + "CollectAudio": { + "enabled": false, + "audio_subset_name": "audioMain" + }, "CollectSceneVersion": { "hosts": [ "aftereffects", @@ -160,6 +164,10 @@ } ] }, + "PreIntegrateThumbnails": { + "enabled": true, + "integrate_profiles": [] + }, "IntegrateSubsetGroup": { "subset_grouping_profiles": [ { @@ -403,7 +411,8 @@ "enabled": false } ], - "extra_folders": [] + "extra_folders": [], + "workfile_lock_profiles": [] }, "loader": { "family_filter_profiles": [ @@ -414,6 +423,10 @@ "filter_families": [] } ] + }, + "publish": { + "template_name_profiles": [], + "hero_template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/defaults/project_settings/hiero.json b/openpype/settings/defaults/project_settings/hiero.json index e9e7199330..d2ba697305 100644 --- a/openpype/settings/defaults/project_settings/hiero.json +++ 
b/openpype/settings/defaults/project_settings/hiero.json @@ -1,4 +1,29 @@ { + "imageio": { + "workfile": { + "ocioConfigName": "nuke-default", + "ocioconfigpath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "workingSpace": "linear", + "sixteenBitLut": "sRGB", + "eightBitLut": "sRGB", + "floatLut": "linear", + "logLut": "Cineon", + "viewerLut": "sRGB", + "thumbnailLut": "sRGB" + }, + "regexInputs": { + "inputs": [ + { + "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", + "colorspace": "sRGB" + } + ] + } + }, "create": { "CreateShotClip": { "hierarchy": "{folder}/{sequence}", diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 911bf82d9b..1517983569 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -1,4 +1,5 @@ { + "shelves": [], "create": { "CreateArnoldAss": { "enabled": true, @@ -47,6 +48,18 @@ } }, "publish": { + "ValidateWorkfilePaths": { + "enabled": true, + "optional": true, + "node_types": [ + "file", + "alembic" + ], + "prohibited_vars": [ + "$HIP", + "$JOB" + ] + }, "ValidateContainers": { "enabled": true, "optional": true, diff --git a/openpype/settings/defaults/project_settings/kitsu.json b/openpype/settings/defaults/project_settings/kitsu.json index ba02d8d259..3a9723b9c0 100644 --- a/openpype/settings/defaults/project_settings/kitsu.json +++ b/openpype/settings/defaults/project_settings/kitsu.json @@ -1,8 +1,4 @@ { - "entities_root": { - "assets": "Assets", - "shots": "Shots" - }, "entities_naming_pattern": { "episode": "E##", "sequence": "SQ##", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 28f6d23e4d..86815b8fc4 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1,4 +1,27 @@ { + "imageio": { + "colorManagementPreference_v2": { + "enabled": true, + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "renderSpace": "ACEScg", + "displayName": "sRGB", + "viewName": "ACES 1.0 SDR-video" + }, + "colorManagementPreference": { + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "renderSpace": "scene-linear Rec 709/sRGB", + "viewTransform": "sRGB gamma" + } + }, + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders/maya\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "ext_mapping": { "model": "ma", "mayaAscii": "ma", @@ -33,12 +56,12 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "renders", - "enable_all_lights": false, + "default_render_image_folder": "renders/maya", + "enable_all_lights": true, "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { - "image_prefix": "maya///_", + "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, "tiled": true, @@ -46,17 +69,17 @@ "additional_options": [] }, "vray_renderer": { - "image_prefix": "maya///", + "image_prefix": "//", "engine": "1", - "image_format": "png", + "image_format": "exr", "aov_list": [], "additional_options": [] }, 
"redshift_renderer": { - "image_prefix": "maya///", + "image_prefix": "//", "primary_gi_engine": "0", "secondary_gi_engine": "0", - "image_format": "iff", + "image_format": "exr", "multilayer_exr": true, "force_combine": true, "aov_list": [], @@ -103,6 +126,7 @@ "CreateAnimation": { "enabled": true, "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main" ] @@ -110,6 +134,7 @@ "CreatePointCache": { "enabled": true, "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main" ] @@ -164,6 +189,8 @@ }, "CreateModel": { "enabled": true, + "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main", "Proxy", @@ -677,25 +704,20 @@ "isolate_view": true, "off_screen": true }, - "PanZoom": { - "pan_zoom": true - }, "Renderer": { "rendererName": "vp2Renderer" }, "Resolution": { "width": 1920, - "height": 1080, - "percent": 1.0, - "mode": "Custom" + "height": 1080 }, "Viewport Options": { "override_viewport_options": true, "displayLights": "default", + "displayTextures": true, "textureMaxResolution": 1024, "renderDepthOfField": true, "shadows": true, - "textures": true, "twoSidedLighting": true, "lineAAEnable": true, "multiSample": 8, @@ -718,7 +740,6 @@ "motionBlurShutterOpenFraction": 0.2, "cameras": false, "clipGhosts": false, - "controlVertices": false, "deformers": false, "dimensions": false, "dynamicConstraints": false, @@ -730,8 +751,7 @@ "grid": false, "hairSystems": true, "handles": false, - "hud": false, - "hulls": false, + "headsUpDisplay": false, "ikHandles": false, "imagePlane": true, "joints": false, @@ -742,7 +762,9 @@ "nCloths": false, "nParticles": false, "nRigids": false, + "controlVertices": false, "nurbsCurves": false, + "hulls": false, "nurbsSurfaces": false, "particleInstancers": false, "pivots": false, @@ -750,7 +772,8 @@ "pluginShapes": false, "polymeshes": true, "strokes": false, - "subdivSurfaces": false + "subdivSurfaces": false, + "textures": false }, "Camera Options": { "displayGateMask": false, @@ -980,4 +1003,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 3e29122074..e5cbacbda7 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -8,6 +8,197 @@ "build_workfile": "ctrl+alt+b" } }, + "imageio": { + "enabled": false, + "viewer": { + "viewerProcess": "sRGB" + }, + "baking": { + "viewerProcess": "rec709" + }, + "workfile": { + "colorManagement": "Nuke", + "OCIO_config": "nuke-default", + "customOCIOConfigPath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "workingSpaceLUT": "linear", + "monitorLut": "sRGB", + "int8Lut": "sRGB", + "int16Lut": "sRGB", + "logLut": "Cineon", + "floatLut": "linear" + }, + "nodes": { + "requiredNodes": [ + { + "plugins": [ + "CreateWriteRender" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "exr" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit half" + }, + { + "type": "text", + "name": "compression", + "value": "Zip (1 scanline)" + }, + { + "type": "bool", + "name": "autocrop", + "value": true + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 186, + 35, + 35, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "linear" + }, + { + "type": "bool", + "name": "create_directories", + 
"value": true + } + ] + }, + { + "plugins": [ + "CreateWritePrerender" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "exr" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit half" + }, + { + "type": "text", + "name": "compression", + "value": "Zip (1 scanline)" + }, + { + "type": "bool", + "name": "autocrop", + "value": true + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 171, + 171, + 10, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "linear" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + }, + { + "plugins": [ + "CreateWriteStill" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "tiff" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit" + }, + { + "type": "text", + "name": "compression", + "value": "Deflate" + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 56, + 162, + 7, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "sRGB" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + } + ], + "overrideNodes": [] + }, + "regexInputs": { + "inputs": [ + { + "regex": "(beauty).*(?=.exr)", + "colorspace": "linear" + } + ] + } + }, "nuke-dirmap": { "enabled": false, "paths": { @@ -131,7 +322,7 @@ "write" ] }, - "ValidateInstanceInContext": { + "ValidateCorrectAssetName": { "enabled": true, "optional": true, "active": true @@ -325,5 +516,8 @@ } ] }, + "templated_workfile_build": { + "profiles": [] + }, "filters": {} } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 758ac64a35..fa0dc7b1c4 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -8,13 +8,19 @@ }, "publish": { "CollectColorCodedInstances": { - "create_flatten_image": false, + "create_flatten_image": "no", "flatten_subset_template": "", "color_code_mapping": [] }, "CollectInstances": { "flatten_subset_template": "" }, + "CollectReview": { + "publish": true + }, + "CollectVersion": { + "enabled": false + }, "ValidateContainers": { "enabled": true, "optional": true, @@ -34,7 +40,10 @@ "make_image_sequence": false, "max_downscale_size": 8192, "jpg_options": { - "tags": [] + "tags": [ + "review", + "ftrackreview" + ] }, "mov_options": { "tags": [ diff --git a/openpype/settings/defaults/project_settings/unreal.json b/openpype/settings/defaults/project_settings/unreal.json index c5f5cdf719..391e2415a5 100644 --- a/openpype/settings/defaults/project_settings/unreal.json +++ b/openpype/settings/defaults/project_settings/unreal.json @@ -1,5 +1,6 @@ { "level_sequences_for_layouts": false, + "delete_unmatched_assets": false, "project_setup": { "dev_mode": true } diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index cba472514e..09c7d3ec94 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -10,6 +10,7 @@ ], "publish": { "CollectPublishedFiles": { + "sync_next_version": false, "task_type_to_family": { "Animation": [ { diff --git 
a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 30b0a5cbe3..42eeb06191 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -706,30 +706,28 @@ "icon": "{}/app_icons/fusion.png", "host_name": "fusion", "environment": { - "FUSION_UTILITY_SCRIPTS_SOURCE_DIR": [], - "FUSION_UTILITY_SCRIPTS_DIR": { - "windows": "{PROGRAMDATA}/Blackmagic Design/Fusion/Scripts/Comp", - "darwin": "/Library/Application Support/Blackmagic Design/Fusion/Scripts/Comp", - "linux": "/opt/Fusion/Scripts/Comp" - }, - "PYTHON36": { + "FUSION_PYTHON3_HOME": { "windows": "{LOCALAPPDATA}/Programs/Python/Python36", "darwin": "~/Library/Python/3.6/bin", "linux": "/opt/Python/3.6/bin" - }, - "PYTHONPATH": [ - "{PYTHON36}/Lib/site-packages", - "{VIRTUAL_ENV}/Lib/site-packages", - "{PYTHONPATH}" - ], - "PATH": [ - "{PYTHON36}", - "{PYTHON36}/Scripts", - "{PATH}" - ], - "OPENPYPE_LOG_NO_COLORS": "Yes" + } }, "variants": { + "18": { + "executables": { + "windows": [ + "C:\\Program Files\\Blackmagic Design\\Fusion 18\\Fusion.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": {} + }, "17": { "executables": { "windows": [ @@ -784,41 +782,11 @@ "host_name": "resolve", "environment": { "RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR": [], - "RESOLVE_SCRIPT_API": { - "windows": "{PROGRAMDATA}/Blackmagic Design/DaVinci Resolve/Support/Developer/Scripting", - "darwin": "/Library/Application Support/Blackmagic Design/DaVinci Resolve/Developer/Scripting", - "linux": "/opt/resolve/Developer/Scripting" - }, - "RESOLVE_SCRIPT_LIB": { - "windows": "C:/Program Files/Blackmagic Design/DaVinci Resolve/fusionscript.dll", - "darwin": "/Applications/DaVinci Resolve/DaVinci Resolve.app/Contents/Libraries/Fusion/fusionscript.so", - "linux": "/opt/resolve/libs/Fusion/fusionscript.so" - }, - "RESOLVE_UTILITY_SCRIPTS_DIR": { - "windows": "{PROGRAMDATA}/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp", - "darwin": "/Library/Application Support/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp", - "linux": "/opt/resolve/Fusion/Scripts/Comp" - }, - "PYTHON36_RESOLVE": { + "RESOLVE_PYTHON3_HOME": { "windows": "{LOCALAPPDATA}/Programs/Python/Python36", "darwin": "~/Library/Python/3.6/bin", "linux": "/opt/Python/3.6/bin" - }, - "PYTHONPATH": [ - "{PYTHON36_RESOLVE}/Lib/site-packages", - "{VIRTUAL_ENV}/Lib/site-packages", - "{PYTHONPATH}", - "{RESOLVE_SCRIPT_API}/Modules", - "{PYTHONPATH}" - ], - "PATH": [ - "{PYTHON36_RESOLVE}", - "{PYTHON36_RESOLVE}/Scripts", - "{PATH}" - ], - "PRE_PYTHON_SCRIPT": "{OPENPYPE_REPOS_ROOT}/openpype/resolve/preload_console.py", - "OPENPYPE_LOG_NO_COLORS": "True", - "RESOLVE_DEV": "True" + } }, "variants": { "stable": { diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 741f13c49b..f28fefdf5a 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -15,7 +15,7 @@ from .exceptions import ( EntitySchemaError ) -from openpype.lib import PypeLogger +from openpype.lib import Logger @six.add_metaclass(ABCMeta) @@ -478,7 +478,7 @@ class BaseItemEntity(BaseEntity): def log(self): """Auto created logger for debugging or warnings.""" if self._log is None: - self._log = PypeLogger.get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log 
@abstractproperty diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 80b1baad1b..0b9fbf7470 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -90,6 +90,10 @@ "type": "schema", "name": "schema_project_nuke" }, + { + "type": "schema", + "name": "schema_project_fusion" + }, { "type": "schema", "name": "schema_project_hiero" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json b/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json index af09329a03..4c72ebda2f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json @@ -12,6 +12,10 @@ "workfile_builder/builder_on_start", "workfile_builder/profiles" ] + }, + { + "type": "schema", + "name": "schema_blender_publish" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index ca62679b3d..73664300aa 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -5,6 +5,69 @@ "label": "Flame", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "is_group": true, + "children": [ + { + "key": "project", + "type": "dict", + "label": "Project", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "text", + "key": "colourPolicy", + "label": "Colour Policy (name or path)" + }, + { + "type": "text", + "key": "frameDepth", + "label": "Image Depth" + }, + { + "type": "text", + "key": "fieldDominance", + "label": "Field Dominance" + } + ] + } + ] + }, + { + "key": "profilesMapping", + "type": "dict", + "label": "Profile names mapping", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "flameName", + "label": "Flame name" + }, + { + "type": "text", + "key": "ocioName", + "label": "OCIO name" + } + ] + } + } + ] + } + ] + }, { "type": "dict", "collapsible": true, @@ -128,6 +191,16 @@ "type": "boolean", "key": "includeHandles", "label": "Enable handles including" + }, + { + "type": "boolean", + "key": "retimedHandles", + "label": "Enable retimed handles" + }, + { + "type": "boolean", + "key": "retimedFramerange", + "label": "Enable retimed shot frameranges" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 5ff8b87427..da414cc961 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -299,24 +299,6 @@ } ] }, - { - "type": "dict", - "key": "first_version_status", - "label": "Set status on first created version", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "text", - "key": "status", - "label": "Status" - } - ] - }, { "type": "dict", "key": "next_task_update", @@ -986,7 +968,7 @@ { "type": "dict", 
"key": "IntegrateFtrackInstance", - "label": "IntegrateFtrackInstance", + "label": "Integrate Ftrack Instance", "is_group": true, "children": [ { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json b/openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json new file mode 100644 index 0000000000..8f98a8173f --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json @@ -0,0 +1,38 @@ +{ + "type": "dict", + "collapsible": true, + "key": "fusion", + "label": "Fusion", + "is_file": true, + "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "collapsible": true, + "children": [ + { + "key": "ocio", + "type": "dict", + "label": "OpenColorIO (OCIO)", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Set OCIO variable for Fusion" + }, + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json b/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json index 3108d2197e..9e18522def 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json @@ -5,6 +5,116 @@ "label": "Hiero", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "is_group": true, + "collapsible": true, + "children": [ + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "ocioConfigName", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "ocioconfigpath", + "label": "Custom OCIO path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpace", + "label": "Working Space" + }, + { + "type": "text", + "key": "sixteenBitLut", + "label": "16 Bit Files" + }, + { + "type": "text", + "key": "eightBitLut", + "label": "8 Bit Files" + }, + { + "type": "text", + "key": "floatLut", + "label": "Floating Point Files" + }, + { + "type": "text", + "key": "logLut", + "label": "Log Files" + }, + { + "type": "text", + "key": "viewerLut", + "label": "Viewer" + }, + { + "type": "text", + "key": "thumbnailLut", + "label": "Thumbnails" + } + ] + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json index cad99dde22..808f154226 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -5,27 +5,17 @@ "label": "Houdini", "is_file": true, "children": [ + { + "type": "schema", + "name": "schema_houdini_scriptshelf" + }, { "type": "schema", "name": "schema_houdini_create" }, { - "type": "dict", - "collapsible": true, - "key": "publish", - "label": "Publish plugins", - "children": [ - { - "type": "schema_template", - "name": "template_publish_plugin", - "template_data": [ - { - "key": "ValidateContainers", - "label": "ValidateContainers" - } - ] - } - ] + "type": "schema", + "name": "schema_houdini_publish" } ] -} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json b/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json index 014a1b7886..fb47670e74 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json @@ -5,23 +5,6 @@ "collapsible": true, "is_file": true, "children": [ - { - "type": "dict", - "key": "entities_root", - "label": "Entities root folder", - "children": [ - { - "type": "text", - "key": "assets", - "label": "Assets:" - }, - { - "type": "text", - "key": "shots", - "label": "Shots (includes Episodes & Sequences if any):" - } - ] - }, { "type": "dict", "key": "entities_naming_pattern", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 816874779e..b2d79797a3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -5,6 +5,83 @@ "label": "Maya", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "collapsible": true, + "is_group": true, + "children": [ + { + "key": "colorManagementPreference_v2", + "type": "dict", + "label": "Color Management Preference v2 (Maya 2022+)", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Use Color Management Preference v2" + }, + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "renderSpace", + "label": "Rendering Space" + }, + { + "type": "text", + "key": "displayName", + "label": "Display" + }, + { + "type": "text", + "key": "viewName", + "label": "View" + } + ] + }, + { + "key": "colorManagementPreference", + "type": "dict", + "label": "Color Management Preference (legacy)", + "collapsible": true, + "children": [ + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "renderSpace", + "label": "Rendering Space" + }, + { + "type": "text", + "key": "viewTransform", + "label": "Viewer Transform" + } + ] + } + ] + }, + { + "type": "text", + "multiline" : true, + "use_label_wrap": true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" + }, { "type": "dict-modifiable", "key": "ext_mapping", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 03d67a57ba..154eca254b 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -46,6 +46,10 @@ } ] }, + { + "type": "schema", + "name": "schema_nuke_imageio" + }, { "type": "dict", "collapsible": true, @@ -308,6 +312,10 @@ "type": "schema_template", "name": "template_workfile_options" }, + { + "type": "schema", + "name": "schema_templated_workfile_build" + }, { "type": "schema", "name": "schema_publish_gui_filter" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 49860301b6..b768db30ee 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -45,9 +45,15 @@ "label": "Set color for publishable layers, set its resulting family and template for subset name. \nCan create flatten image from published instances.(Applicable only for remote publishing!)" }, { - "type": "boolean", "key": "create_flatten_image", - "label": "Create flatten image" + "label": "Create flatten image", + "type": "enum", + "multiselection": false, + "enum_items": [ + { "flatten_with_images": "Flatten with images" }, + { "flatten_only": "Flatten only" }, + { "no": "No" } + ] }, { "type": "text", @@ -125,6 +131,35 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectReview", + "label": "Collect Review", + "children": [ + { + "type": "boolean", + "key": "publish", + "label": "Active" + } + ] + }, + { + "type": "dict", + "key": "CollectVersion", + "label": "Collect Version", + "children": [ + { + "type": "label", + "label": "Synchronize version for image and review instances by workfile version." + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "schema_template", "name": "template_publish_plugin", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index d26b5c1ccf..09e5791ac4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -10,6 +10,11 @@ "key": "level_sequences_for_layouts", "label": "Generate level sequences when loading layouts" }, + { + "type": "boolean", + "key": "delete_unmatched_assets", + "label": "Delete assets that are not matched" + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index 2ef7a05b21..a81a403bcb 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -49,6 +49,19 @@ "key": "CollectPublishedFiles", "label": "Collect Published Files", "children": [ + { + "type": "label", + "label": "Select if all versions of published items should be kept same. (As max(published) + 1.)" + }, + { + "type": "boolean", + "key": "sync_next_version", + "label": "Sync next publish version" + }, + { + "type": "label", + "label": "Configure resulting family and tags on representation based on uploaded file and task.
E.g. '.png' is uploaded >> create instance of 'render' family
'Create review' in Tags >> mark representation to create review from." + }, { "type": "dict-modifiable", "collapsible": true, @@ -74,6 +87,9 @@ "label": "Extensions", "object_type": "text" }, + { + "type": "separator" + }, { "type": "list", "key": "families", @@ -84,9 +100,6 @@ "type": "schema", "name": "schema_representation_tags" }, - { - "type": "separator" - }, { "type": "text", "key": "result_family", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index ef8c907dda..93b6adae6b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -1,10 +1,14 @@ { "type": "dict", "key": "imageio", - "label": "Color Management and Output Formats", + "label": "Color Management and Output Formats (Deprecated)", "is_file": true, "is_group": true, "children": [ + { + "type": "label", + "label": "These settings are deprecated and have moved to: project_settings/{app}/imageio.
You can right-click to copy each host's values and paste them to apply to each host as needed.
Changing these values here will not do anything." + }, { "key": "hiero", "type": "dict", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json new file mode 100644 index 0000000000..58428ad60a --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json @@ -0,0 +1,113 @@ +{ + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "label", + "label": "Validators" + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateCameraZeroKeyframe", + "label": "Validate Camera Zero Keyframe" + } + ] + }, + { + "type": "collapsible-wrap", + "label": "Model", + "children": [ + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateMeshHasUvs", + "label": "Validate Mesh Has UVs" + }, + { + "key": "ValidateMeshNoNegativeScale", + "label": "Validate Mesh No Negative Scale" + }, + { + "key": "ValidateTransformZero", + "label": "Validate Transform Zero" + } + ] + } + ] + }, + { + "type": "splitter" + }, + { + "type": "label", + "label": "Extractors" + }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractBlend", + "label": "Extract Blend", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ExtractFBX", + "label": "Extract FBX (model and rig)" + }, + { + "key": "ExtractABC", + "label": "Extract ABC (model and pointcache)" + }, + { + "key": "ExtractBlendAnimation", + "label": "Extract Animation as Blend" + }, + { + "key": "ExtractAnimationFBX", + "label": "Extract Animation as FBX" + }, + { + "key": "ExtractCamera", + "label": "Extract FBX Camera as FBX" + }, + { + "key": "ExtractLayout", + "label": "Extract Layout as JSON" + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index e1aa230b49..773dea1229 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -18,6 +18,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CollectAudio", + "label": "Collect Audio", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "audio_subset_name", + "label": "Name of audio variant", + "type": "text", + "placeholder": "audioMain" + } + ] + }, { "type": "dict", "collapsible": true, @@ -534,6 +555,73 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "PreIntegrateThumbnails", + "label": "Override Integrate Thumbnail Representations", + "is_group": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "label", + "label": "Explicitly set if Thumbnail representation 
should be integrated into the DB.
If no matching profile is set, the existing state from the host implementation is kept." + }, + { + "type": "list", + "key": "integrate_profiles", + "label": "Integrate profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "subsets", + "label": "Subset names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "integrate_thumbnail", + "label": "Integrate thumbnail" + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, @@ -642,10 +730,14 @@ ] } }, + { + "type": "label", + "label": "NOTE: Publish template profiles settings were moved to Tools/Publish/Template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", @@ -750,10 +842,14 @@ "type": "list", "object_type": "text" }, + { + "type": "label", + "label": "NOTE: Hero publish template profiles settings were moved to Tools/Publish/Hero template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index f8c9482e5f..ba446135e2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -238,6 +238,31 @@ } ] } + }, + { + "type": "list", + "key": "workfile_lock_profiles", + "label": "Workfile lock profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "hosts-enum", + "key": "host_name", + "label": "Hosts", + "multiselection": true + }, + { + "type": "splitter" + }, + { + "key": "enabled", + "label": "Enabled", + "type": "boolean" + } + ] + } } ] }, @@ -284,6 +309,102 @@ } } ] + }, + { + "type": "dict", + "key": "publish", + "label": "Publish", + "children": [ + { + "type": "label", + "label": "NOTE: For backwards compatibility the value can be empty, in which case the values from IntegrateAssetNew are used. This will change in the future, so please move all values here as soon as possible."
+ }, + { + "type": "list", + "key": "template_name_profiles", + "label": "Template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + }, + { + "type": "list", + "key": "hero_template_name_profiles", + "label": "Hero template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name", + "tooltip": "Name of template from Anatomy templates" + } + ] + } + } + ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json new file mode 100644 index 0000000000..aa6eaf5164 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json @@ -0,0 +1,50 @@ +{ + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "ValidateWorkfilePaths", + "label": "Validate Workfile Paths", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "key": "node_types", + "label": "Node types", + "type": "list", + "object_type": "text" + }, + { + "key": "prohibited_vars", + "label": "Prohibited variables", + "type": "list", + "object_type": "text" + } + ] + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + } + ] +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json new file mode 100644 index 0000000000..bab9b604b4 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -0,0 +1,71 @@ +{ + "type": "list", + "key": "shelves", + "label": "Shelves Manager", + "is_group": true, + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "shelf_set_name", + "label": "Shelf Set Name" + }, + { + "type": "path", + "key": "shelf_set_source_path", + "label": "Shelf Set Path (optional)", + "multipath": false, + "multiplatform": true + }, + { + "type": "list", + "key": "shelf_definition", + "label": "Shelves", + "use_label_wrap": true, + "object_type": { + "type": 
"dict", + "children": [ + { + "type": "text", + "key": "shelf_name", + "label": "Shelf Name" + }, + { + "type": "list", + "key": "tools_list", + "label": "Tools", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "label", + "label": "Name" + }, + { + "type": "path", + "key": "script", + "label": "Script" + }, + { + "type": "path", + "key": "icon", + "label": "Icon" + }, + { + "type": "text", + "key": "help", + "label": "Help" + } + ] + } + } + ] + } + } + ] + } +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 7a40f349cc..62c33f55fc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -94,18 +94,6 @@ } ] }, - - { - "type": "dict", - "key": "PanZoom", - "children": [ - { - "type": "boolean", - "key": "pan_zoom", - "label": " Pan Zoom" - } - ] - }, { "type": "splitter" }, @@ -153,19 +141,6 @@ "decimal": 0, "minimum": 0, "maximum": 99999 - }, - { - "type": "number", - "key": "percent", - "label": "percent", - "decimal": 1, - "minimum": 0, - "maximum": 200 - }, - { - "type": "text", - "key": "mode", - "label": "Mode" } ] }, @@ -195,6 +170,11 @@ { "nolights": "No Lights"} ] }, + { + "type": "boolean", + "key": "displayTextures", + "label": "Display Textures" + }, { "type": "number", "key": "textureMaxResolution", @@ -217,11 +197,6 @@ "key": "shadows", "label": "Display Shadows" }, - { - "type": "boolean", - "key": "textures", - "label": "Display Textures" - }, { "type": "boolean", "key": "twoSidedLighting", @@ -369,120 +344,114 @@ { "type": "splitter" }, + { + "type": "label", + "label": "Show" + }, { "type": "boolean", "key": "cameras", - "label": "cameras" + "label": "Cameras" }, { "type": "boolean", "key": "clipGhosts", - "label": "clipGhosts" - }, - { - "type": "boolean", - "key": "controlVertices", - "label": "controlVertices" + "label": "Clip Ghosts" }, { "type": "boolean", "key": "deformers", - "label": "deformers" + "label": "Deformers" }, { "type": "boolean", "key": "dimensions", - "label": "dimensions" + "label": "Dimensions" }, { "type": "boolean", "key": "dynamicConstraints", - "label": "dynamicConstraints" + "label": "Dynamic Constraints" }, { "type": "boolean", "key": "dynamics", - "label": "dynamics" + "label": "Dynamics" }, { "type": "boolean", "key": "fluids", - "label": "fluids" + "label": "Fluids" }, { "type": "boolean", "key": "follicles", - "label": "follicles" + "label": "Follicles" }, { "type": "boolean", "key": "gpuCacheDisplayFilter", - "label": "gpuCacheDisplayFilter" + "label": "GPU Cache" }, { "type": "boolean", "key": "greasePencils", - "label": "greasePencils" + "label": "Grease Pencil" }, { "type": "boolean", "key": "grid", - "label": "grid" + "label": "Grid" }, { "type": "boolean", "key": "hairSystems", - "label": "hairSystems" + "label": "Hair Systems" }, { "type": "boolean", "key": "handles", - "label": "handles" + "label": "Handles" }, { "type": "boolean", - "key": "hud", - "label": "hud" - }, - { - "type": "boolean", - "key": "hulls", - "label": "hulls" + "key": "headsUpDisplay", + "label": "HUD" }, { "type": "boolean", "key": "ikHandles", - "label": "ikHandles" + "label": "IK Handles" }, { "type": "boolean", "key": "imagePlane", - "label": "imagePlane" + "label": "Image Planes" }, { "type": 
"boolean", "key": "joints", - "label": "joints" + "label": "Joints" }, { "type": "boolean", "key": "lights", - "label": "lights" + "label": "Lights" }, { "type": "boolean", "key": "locators", - "label": "locators" + "label": "Locators" }, { "type": "boolean", "key": "manipulators", - "label": "manipulators" + "label": "Manipulators" }, { "type": "boolean", "key": "motionTrails", - "label": "motionTrails" + "label": "Motion Trails" }, { "type": "boolean", @@ -499,50 +468,65 @@ "key": "nRigids", "label": "nRigids" }, + { + "type": "boolean", + "key": "controlVertices", + "label": "NURBS CVs" + }, { "type": "boolean", "key": "nurbsCurves", - "label": "nurbsCurves" + "label": "NURBS Curves" + }, + { + "type": "boolean", + "key": "hulls", + "label": "NURBS Hulls" }, { "type": "boolean", "key": "nurbsSurfaces", - "label": "nurbsSurfaces" + "label": "NURBS Surfaces" }, { "type": "boolean", "key": "particleInstancers", - "label": "particleInstancers" + "label": "Particle Instancers" }, { "type": "boolean", "key": "pivots", - "label": "pivots" + "label": "Pivots" }, { "type": "boolean", "key": "planes", - "label": "planes" + "label": "Planes" }, { "type": "boolean", "key": "pluginShapes", - "label": "pluginShapes" + "label": "Plugin Shapes" }, { "type": "boolean", "key": "polymeshes", - "label": "polymeshes" + "label": "Polygons" }, { "type": "boolean", "key": "strokes", - "label": "strokes" + "label": "Strokes" }, { "type": "boolean", "key": "subdivSurfaces", - "label": "subdivSurfaces" + "label": "Subdiv Surfaces" + }, + { + "type": "boolean", + "key": "textures", + "label": "Texture Placements" } ] }, @@ -555,47 +539,47 @@ { "type": "boolean", "key": "displayGateMask", - "label": "displayGateMask" + "label": "Display Gate Mask" }, { "type": "boolean", "key": "displayResolution", - "label": "displayResolution" + "label": "Display Resolution" }, { "type": "boolean", "key": "displayFilmGate", - "label": "displayFilmGate" + "label": "Display Film Gate" }, { "type": "boolean", "key": "displayFieldChart", - "label": "displayFieldChart" + "label": "Display Field Chart" }, { "type": "boolean", "key": "displaySafeAction", - "label": "displaySafeAction" + "label": "Display Safe Action" }, { "type": "boolean", "key": "displaySafeTitle", - "label": "displaySafeTitle" + "label": "Display Safe Title" }, { "type": "boolean", "key": "displayFilmPivot", - "label": "displayFilmPivot" + "label": "Display Film Pivot" }, { "type": "boolean", "key": "displayFilmOrigin", - "label": "displayFilmOrigin" + "label": "Display Film Origin" }, { "type": "number", "key": "overscan", - "label": "overscan", + "label": "Overscan", "decimal": 1, "minimum": 0, "maximum": 10 diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 431add28df..bc6520474d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -127,6 +127,41 @@ "key": "write_color_sets", "label": "Write Color Sets" }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CreateModel", + "label": "Create Model", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": 
"Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, { "type": "list", "key": "defaults", @@ -152,6 +187,11 @@ "key": "write_color_sets", "label": "Write Color Sets" }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, { "type": "list", "key": "defaults", @@ -160,7 +200,7 @@ } ] }, - + { "type": "schema_template", "name": "template_create_plugin", @@ -197,10 +237,6 @@ "key": "CreateMayaScene", "label": "Create Maya Scene" }, - { - "key": "CreateModel", - "label": "Create Model" - }, { "key": "CreateRenderSetup", "label": "Create Render Setup" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 6ee02ca78f..0cbb684fc6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -140,7 +140,7 @@ }, { "type": "label", - "label": "Add additional options - put attribute and value, like AASamples" + "label": "Add additional options - put attribute and value, like defaultArnoldRenderOptions.AASamples = 4" }, { "type": "dict-modifiable", @@ -276,7 +276,7 @@ }, { "type": "label", - "label": "Add additional options - put attribute and value, like aaFilterSize" + "label": "Add additional options - put attribute and value, like vraySettings.aaFilterSize = 1.5" }, { "type": "dict-modifiable", @@ -405,7 +405,7 @@ }, { "type": "label", - "label": "Add additional options - put attribute and value, like reflectionMaxTraceDepth" + "label": "Add additional options - put attribute and value, like redshiftOptions.reflectionMaxTraceDepth = 3" }, { "type": "dict-modifiable", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json new file mode 100644 index 0000000000..52db853ef6 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json @@ -0,0 +1,254 @@ +{ + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "checkbox_key": "enabled", + "collapsible": true, + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "viewer", + "type": "dict", + "label": "Viewer", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "baking", + "type": "dict", + "label": "Extract-review baking profile", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "colorManagement", + "label": "color management", + "enum_items": [ + { + "Nuke": "Nuke" + }, + { + "OCIO": "OCIO" + } + ] + }, + { + "type": "enum", + "key": "OCIO_config", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "spi-vfx": "spi-vfx" + }, + { + "spi-anim": "spi-anim" + }, + { + "aces_0.1.1": "aces_0.1.1" + }, + { + "aces_0.7.1": "aces_0.7.1" + }, + { + "aces_1.0.1": 
"aces_1.0.1" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "aces_1.2": "aces_1.2" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "customOCIOConfigPath", + "label": "Custom OCIO config path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpaceLUT", + "label": "Working Space" + }, + { + "type": "text", + "key": "monitorLut", + "label": "monitor" + }, + { + "type": "text", + "key": "int8Lut", + "label": "8-bit files" + }, + { + "type": "text", + "key": "int16Lut", + "label": "16-bit files" + }, + { + "type": "text", + "key": "logLut", + "label": "log files" + }, + { + "type": "text", + "key": "floatLut", + "label": "float files" + } + ] + } + ] + }, + { + "key": "nodes", + "type": "dict", + "label": "Nodes", + "collapsible": true, + "children": [ + { + "key": "requiredNodes", + "type": "list", + "label": "Plugin required", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs", + "key": "knobs" + } + ] + } + + ] + } + }, + { + "type": "splitter" + }, + { + "type": "list", + "key": "overrideNodes", + "label": "Plugin's node overrides", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "key": "subsets", + "label": "Subsets", + "type": "list", + "object_type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs overrides", + "key": "knobs" + } + ] + } + ] + } + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 575bfe79e7..e5827a92c4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -61,8 +61,8 @@ "name": "template_publish_plugin", "template_data": [ { - "key": "ValidateInstanceInContext", - "label": "Validate Instance In Context" + "key": "ValidateCorrectAssetName", + "label": "Validate Correct Asset name" } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index a591facf98..99a29beb27 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ 
-17,7 +17,7 @@ "type": "task-types-enum" }, { - "key": "tasks", + "key": "task_names", "label": "Task names", "type": "list", "object_type": "text" } diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 79ec6248ac..def8c16ea7 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -831,7 +831,10 @@ class MongoSettingsHandler(SettingsHandler): data_cache.update_last_saved_info(last_saved_info) self._save_project_data( - project_name, self._project_settings_key, data_cache + project_name, + self._project_settings_key, + data_cache, + last_saved_info ) def save_project_anatomy(self, project_name, anatomy_data): @@ -849,8 +852,16 @@ class MongoSettingsHandler(SettingsHandler): self._save_project_anatomy_data(project_name, data_cache) else: + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + PROJECT_ANATOMY_KEY, + project_name + ) self._save_project_data( - project_name, self._project_anatomy_key, data_cache + project_name, + self._project_anatomy_key, + data_cache, + last_saved_info ) @classmethod @@ -931,14 +942,16 @@ class MongoSettingsHandler(SettingsHandler): {"$set": update_dict} ) - def _save_project_data(self, project_name, doc_type, data_cache): + def _save_project_data( + self, project_name, doc_type, data_cache, last_saved_info + ): is_default = bool(project_name is None) query_filter = { "type": doc_type, "is_default": is_default, "version": self._current_version } - last_saved_info = data_cache.last_saved_info + new_project_settings_doc = { "type": doc_type, "data": data_cache.data, @@ -946,6 +959,7 @@ "version": self._current_version, "last_saved_info": last_saved_info.to_data() } + if not is_default: query_filter["project_name"] = project_name new_project_settings_doc["project_name"] = project_name diff --git a/openpype/style/__init__.py b/openpype/style/__init__.py index b2a1a4ce6c..473fb42bb5 100644 --- a/openpype/style/__init__.py +++ b/openpype/style/__init__.py @@ -1,4 +1,5 @@ import os +import copy import json import collections import six @@ -19,6 +20,9 @@ class _Cache: disabled_entity_icon_color = None deprecated_entity_font_color = None + colors_data = None + objected_colors = None + def get_style_image_path(image_name): # All filenames are lowered @@ -46,8 +50,11 @@ def _get_colors_raw_data(): def get_colors_data(): """Only color data from stylesheet data.""" - data = _get_colors_raw_data() - return data.get("color") or {} + if _Cache.colors_data is None: + data = _get_colors_raw_data() + color_data = data.get("color") or {} + _Cache.colors_data = color_data + return copy.deepcopy(_Cache.colors_data) def _convert_color_values_to_objects(value): @@ -75,17 +82,38 @@ return parse_color(value) -def get_objected_colors(): +def get_objected_colors(*keys): """Colors parsed from stylesheet data into color definitions. + You can pass multiple arguments to get a key from the data dict's colors. + Because this function returns a deep copy of the cached data this allows + a much smaller dataset to be copied and thus results in a faster call. + It is however a micro-optimization in the area of 0.001s and smaller. + + For example: + >>> get_objected_colors() # copy of full colors dict + >>> get_objected_colors("font") + >>> get_objected_colors("loader", "asset-view") + + Args: + *keys: Each key argument will return a key nested deeper in the + objected colors data. + Returns: - dict: Parsed color objects by keys in data.
+ Any: Parsed color objects by keys in data. """ - colors_data = get_colors_data() - output = {} - for key, value in colors_data.items(): - output[key] = _convert_color_values_to_objects(value) - return output + if _Cache.objected_colors is None: + colors_data = get_colors_data() + output = {} + for key, value in colors_data.items(): + output[key] = _convert_color_values_to_objects(value) + + _Cache.objected_colors = output + + output = _Cache.objected_colors + for key in keys: + output = output[key] + return copy.deepcopy(output) def _load_stylesheet(): diff --git a/openpype/style/color_defs.py b/openpype/style/color_defs.py index 0f4e145ca0..f1eab38c24 100644 --- a/openpype/style/color_defs.py +++ b/openpype/style/color_defs.py @@ -296,7 +296,7 @@ class HSLColor: if "%" in sat_str: sat = float(sat_str.rstrip("%")) / 100 else: - sat = float(sat) + sat = float(sat_str) if "%" in light_str: light = float(light_str.rstrip("%")) / 100 @@ -337,8 +337,8 @@ class HSLAColor: as float (0-1 range). Examples: - "hsl(27, 0.7, 0.3)" - "hsl(27, 70%, 30%)" + "hsla(27, 0.7, 0.3, 0.5)" + "hsla(27, 70%, 30%, 0.5)" """ def __init__(self, value): modified_color = value.lower().strip() @@ -350,7 +350,7 @@ class HSLAColor: if "%" in sat_str: sat = float(sat_str.rstrip("%")) / 100 else: - sat = float(sat) + sat = float(sat_str) if "%" in light_str: light = float(light_str.rstrip("%")) / 100 diff --git a/openpype/style/data.json b/openpype/style/data.json index 15d9472e3e..fef69071ed 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -20,7 +20,7 @@ "color": { "font": "#D3D8DE", "font-hover": "#F0F2F5", - "font-disabled": "#99A3B2", + "font-disabled": "#5b6779", "font-view-selection": "#ffffff", "font-view-hover": "#F0F2F5", @@ -89,8 +89,10 @@ }, "publisher": { "error": "#AA5050", + "crash": "#FF6432", "success": "#458056", "warning": "#ffc671", + "tab-bg": "#16191d", "list-view-group": { "bg": "#434a56", "bg-hover": "rgba(168, 175, 189, 0.3)", diff --git a/openpype/style/style.css b/openpype/style/style.css index 72d12a9230..a6818a5792 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -856,6 +856,33 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { } /* New Create/Publish UI */ +PublisherTabsWidget { + background: {color:publisher:tab-bg}; +} + +PublisherTabBtn { + border-radius: 0px; + background: {color:bg-inputs}; + font-size: 9pt; + font-weight: regular; + padding: 0.5em 1em 0.5em 1em; +} + +PublisherTabBtn:disabled { + background: {color:bg-inputs}; +} + +PublisherTabBtn:hover { + background: {color:bg-buttons}; +} + +PublisherTabBtn[active="1"] { + background: {color:bg}; +} +PublisherTabBtn[active="1"]:hover { + background: {color:bg}; +} + #CreatorDetailedDescription { padding-left: 5px; padding-right: 5px; @@ -865,18 +892,16 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { } #CreateDialogHelpButton { - background: rgba(255, 255, 255, 31); + background: {color:bg-buttons}; border-top-left-radius: 0.2em; border-bottom-left-radius: 0.2em; border-top-right-radius: 0; border-bottom-right-radius: 0; - font-size: 10pt; font-weight: bold; - padding: 0px; } #CreateDialogHelpButton:hover { - background: rgba(255, 255, 255, 63); + background: {color:bg-button-hover}; } #CreateDialogHelpButton QWidget { background: transparent; @@ -944,38 +969,26 @@ VariantInputsWidget QToolButton { color: {color:publisher:error}; } -#PublishFrame { - background: rgba(0, 0, 0, 127); -} -#PublishFrame[state="1"] { - background: rgb(22, 25, 29); -} 
-#PublishFrame[state="2"] { - background: {color:bg}; -} - #PublishInfoFrame { background: {color:bg}; - border: 2px solid black; border-radius: 0.3em; } - -#PublishInfoFrame[state="-1"] { - background: rgb(194, 226, 236); -} - #PublishInfoFrame[state="0"] { - background: {color:publisher:error}; + background: {color:publisher:success}; } #PublishInfoFrame[state="1"] { - background: {color:publisher:success}; + background: {color:publisher:crash}; } #PublishInfoFrame[state="2"] { background: {color:publisher:warning}; } +#PublishInfoFrame[state="3"], #PublishInfoFrame[state="4"] { + background: rgb(194, 226, 236); +} + #PublishInfoFrame QLabel { color: black; font-style: bold; @@ -989,6 +1002,11 @@ VariantInputsWidget QToolButton { font-size: 13pt; } +ValidationArtistMessage QLabel { + font-size: 20pt; + font-weight: bold; +} + #ValidationActionButton { border-radius: 0.2em; padding: 4px 6px 4px 6px; @@ -1005,17 +1023,16 @@ VariantInputsWidget QToolButton { } #ValidationErrorTitleFrame { - background: {color:bg-inputs}; - border-left: 4px solid transparent; + border-radius: 0.2em; + background: {color:bg-buttons}; } #ValidationErrorTitleFrame:hover { - border-left-color: {color:border}; + background: {color:bg-buttons-hover}; } #ValidationErrorTitleFrame[selected="1"] { - background: {color:bg}; - border-left-color: {palette:blue-light}; + background: {color:bg-view-selection}; } #ValidationErrorInstanceList { @@ -1068,7 +1085,7 @@ VariantInputsWidget QToolButton { border-color: {color:publisher:error}; } -#PublishProgressBar[state="0"]::chunk { +#PublishProgressBar[state="1"]::chunk, #PublishProgressBar[state="4"]::chunk { background: {color:bg-buttons}; } diff --git a/openpype/tools/creator/window.py b/openpype/tools/creator/window.py index a3937d6a40..e2396ed29e 100644 --- a/openpype/tools/creator/window.py +++ b/openpype/tools/creator/window.py @@ -6,7 +6,7 @@ from Qt import QtWidgets, QtCore from openpype.client import get_asset_by_name, get_subsets from openpype import style -from openpype.api import get_current_project_settings +from openpype.settings import get_current_project_settings from openpype.tools.utils.lib import qt_app_context from openpype.pipeline import legacy_io from openpype.pipeline.create import ( diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index 546bda1c34..34d06f72cc 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -4,8 +4,9 @@ from Qt import QtWidgets, QtGui from openpype import PLUGINS_DIR from openpype import style -from openpype.api import Logger, resources +from openpype import resources from openpype.lib import ( + Logger, ApplictionExecutableNotFound, ApplicationLaunchFailed ) diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index c1392b7b8f..68e57c6b92 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -1,7 +1,7 @@ import os from Qt import QtGui import qtawesome -from openpype.api import resources +from openpype import resources ICON_CACHE = {} NOT_FOUND = type("NotFound", (object, ), {}) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 6d40d21f96..6e3b531018 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -281,18 +281,25 @@ class ActionModel(QtGui.QStandardItemModel): if not action_item: return - action = action_item.data(ACTION_ROLE) - actual_data = self._prepare_compare_data(action) + actions = action_item.data(ACTION_ROLE) + if 
not isinstance(actions, list): + actions = [actions] + + action_actions_data = [ + self._prepare_compare_data(action) + for action in actions + ] stored = self.launcher_registry.get_item("force_not_open_workfile") - if is_checked: - stored.append(actual_data) - else: - final_values = [] - for config in stored: - if config != actual_data: - final_values.append(config) - stored = final_values + for actual_data in action_actions_data: + if is_checked: + stored.append(actual_data) + else: + final_values = [] + for config in stored: + if config != actual_data: + final_values.append(config) + stored = final_values self.launcher_registry.set_item("force_not_open_workfile", stored) self.launcher_registry._get_item.cache_clear() @@ -329,21 +336,24 @@ class ActionModel(QtGui.QStandardItemModel): item (QStandardItem) stored (list) of dict """ - action = item.data(ACTION_ROLE) - if not self.is_application_action(action): + + actions = item.data(ACTION_ROLE) + if not isinstance(actions, list): + actions = [actions] + + if not self.is_application_action(actions[0]): return False - actual_data = self._prepare_compare_data(action) + action_actions_data = [ + self._prepare_compare_data(action) + for action in actions + ] for config in stored: - if config == actual_data: + if config in action_actions_data: return True - return False def _prepare_compare_data(self, action): - if isinstance(action, list) and action: - action = action[0] - compare_data = {} if action and action.label: compare_data = { diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 62599664fe..774ceb659d 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -312,11 +312,12 @@ class ActionBar(QtWidgets.QWidget): is_group = index.data(GROUP_ROLE) is_variant_group = index.data(VARIANT_GROUP_ROLE) + force_not_open_workfile = index.data(FORCE_NOT_OPEN_WORKFILE_ROLE) if not is_group and not is_variant_group: action = index.data(ACTION_ROLE) # Change data of application action if issubclass(action, ApplicationAction): - if index.data(FORCE_NOT_OPEN_WORKFILE_ROLE): + if force_not_open_workfile: action.data["start_last_workfile"] = False else: action.data.pop("start_last_workfile", None) @@ -385,10 +386,18 @@ class ActionBar(QtWidgets.QWidget): menu.addMenu(sub_menu) result = menu.exec_(QtGui.QCursor.pos()) - if result: - action = actions_mapping[result] - self._start_animation(index) - self.action_clicked.emit(action) + if not result: + return + + action = actions_mapping[result] + if issubclass(action, ApplicationAction): + if force_not_open_workfile: + action.data["start_last_workfile"] = False + else: + action.data.pop("start_last_workfile", None) + + self._start_animation(index) + self.action_clicked.emit(action) class ActionHistory(QtWidgets.QPushButton): diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index dab6949613..a9eaa932bb 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -4,7 +4,7 @@ import logging from Qt import QtWidgets, QtCore, QtGui from openpype import style -from openpype.api import resources +from openpype import resources from openpype.pipeline import AvalonMongoDB import qtawesome diff --git a/openpype/tools/loader/delegates.py b/openpype/tools/loader/delegates.py new file mode 100644 index 0000000000..e6663d48f1 --- /dev/null +++ b/openpype/tools/loader/delegates.py @@ -0,0 +1,28 @@ +from Qt import QtWidgets, QtGui, QtCore + + +class 
LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): + """Delegate for Loaded in Scene state columns. + + Shows "yes" or "no" for True or False values + Colorizes green or dark grey based on True or False values + + """ + + def __init__(self, *args, **kwargs): + super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) + self._colors = { + True: QtGui.QColor(80, 170, 80), + False: QtGui.QColor(90, 90, 90) + } + + def displayText(self, value, locale): + return "yes" if value else "no" + + def initStyleOption(self, option, index): + super(LoadedInSceneDelegate, self).initStyleOption(option, index) + + # Colorize based on value + value = index.data(QtCore.Qt.DisplayRole) + color = self._colors[bool(value)] + option.palette.setBrush(QtGui.QPalette.Text, color) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 3ce44ea6c8..77a8669c46 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -17,6 +17,7 @@ from openpype.client import ( get_representations ) from openpype.pipeline import ( + registered_host, HeroVersionType, schema, ) @@ -24,6 +25,7 @@ from openpype.pipeline import ( from openpype.style import get_default_entity_icon_color from openpype.tools.utils.models import TreeModel, Item from openpype.tools.utils import lib +from openpype.host import ILoadHost from openpype.modules import ModulesManager from openpype.tools.utils.constants import ( @@ -136,6 +138,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration", "handles", "step", + "loaded_in_scene", "repre_info" ] @@ -150,6 +153,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration": "Duration", "handles": "Handles", "step": "Step", + "loaded_in_scene": "In scene", "repre_info": "Availability" } @@ -231,8 +235,14 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self._doc_fetching_stop = False self._doc_payload = {} - self.doc_fetched.connect(self._on_doc_fetched) + self._host = registered_host() + self._loaded_representation_ids = set() + # Refresh loaded scene containers only every 3 seconds at most + self._host_loaded_refresh_timeout = 3 + self._host_loaded_refresh_time = 0 + + self.doc_fetched.connect(self._on_doc_fetched) self.refresh() def get_item_by_id(self, item_id): @@ -272,15 +282,17 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # update availability on active site when version changes if self.sync_server.enabled and version_doc: - repre_info = self.sync_server.get_repre_info_for_versions( - project_name, - [version_doc["_id"]], - self.active_site, - self.remote_site + repres_info = list( + self.sync_server.get_repre_info_for_versions( + project_name, + [version_doc["_id"]], + self.active_site, + self.remote_site + ) ) - if repre_info: + if repres_info: version_doc["data"].update( - self._get_repre_dict(repre_info[0])) + self._get_repre_dict(repres_info[0])) self.set_version(index, version_doc) @@ -472,29 +484,56 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version - repre_info = {} - if self.sync_server.enabled: - version_ids = set() - for _subset_id, doc in last_versions_by_subset_id.items(): - version_ids.add(doc["_id"]) + # Check loaded subsets + loaded_subset_ids = set() + ids = self._loaded_representation_ids + if ids: + if self._doc_fetching_stop: + return - repres = self.sync_server.get_repre_info_for_versions( + # Get subset ids from loaded representations in workfile + # todo: optimize with aggregation query to distinct subset id + 
representations = get_representations(project_name, + representation_ids=ids, + fields=["parent"]) + version_ids = set(repre["parent"] for repre in representations) + versions = get_versions(project_name, + version_ids=version_ids, + fields=["parent"]) + loaded_subset_ids = set(version["parent"] for version in versions) + + if self._doc_fetching_stop: + return + + repre_info_by_version_id = {} + if self.sync_server.enabled: + versions_by_id = {} + for _subset_id, doc in last_versions_by_subset_id.items(): + versions_by_id[doc["_id"]] = doc + + repres_info = self.sync_server.get_repre_info_for_versions( project_name, - list(version_ids), self.active_site, self.remote_site + list(versions_by_id.keys()), + self.active_site, + self.remote_site ) - for repre in repres: + for repre_info in repres_info: if self._doc_fetching_stop: return + + version_id = repre_info["_id"] + doc = versions_by_id[version_id] doc["active_provider"] = self.active_provider doc["remote_provider"] = self.remote_provider - repre_info[repre["_id"]] = repre + repre_info_by_version_id[version_id] = repre_info self._doc_payload = { "asset_docs_by_id": asset_docs_by_id, "subset_docs_by_id": subset_docs_by_id, "subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, - "repre_info_by_version_id": repre_info + "repre_info_by_version_id": repre_info_by_version_id, + "subsets_loaded_by_id": loaded_subset_ids } self.doc_fetched.emit() @@ -526,6 +565,20 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self.doc_fetched.emit() return + # Collect scene container representations to compare loaded state + # This runs in the main thread because it involves the host DCC + if self._host: + time_since_refresh = time.time() - self._host_loaded_refresh_time + if time_since_refresh > self._host_loaded_refresh_timeout: + if isinstance(self._host, ILoadHost): + containers = self._host.get_containers() + else: + containers = self._host.ls() + + repre_ids = {con.get("representation") for con in containers} + self._loaded_representation_ids = repre_ids + self._host_loaded_refresh_time = time.time() + self.fetch_subset_and_version() def _on_doc_fetched(self): @@ -547,6 +600,10 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "repre_info_by_version_id" ) + subsets_loaded_by_id = self._doc_payload.get( + "subsets_loaded_by_id" + ) + if ( asset_docs_by_id is None or subset_docs_by_id is None @@ -561,7 +618,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id, - repre_info_by_version_id + repre_info_by_version_id, + subsets_loaded_by_id ) self.endResetModel() self.refreshed.emit(True) @@ -589,8 +647,12 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): return merge_group def _fill_subset_items( - self, asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id, - repre_info_by_version_id + self, + asset_docs_by_id, + subset_docs_by_id, + last_versions_by_subset_id, + repre_info_by_version_id, + subsets_loaded_by_id ): _groups_tuple = self.groups_config.split_subsets_for_groups( subset_docs_by_id.values(), self._grouping @@ -614,6 +676,35 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "index": self.index(group_item.row(), 0) } + def _add_subset_item(subset_doc, parent_item, parent_index): + last_version = last_versions_by_subset_id.get( + subset_doc["_id"] + ) + # do not show subset without version + if not last_version: + return + + data = copy.deepcopy(subset_doc) + data["subset"] = subset_doc["name"] 
+ + asset_id = subset_doc["parent"] + data["asset"] = asset_docs_by_id[asset_id]["name"] + + data["last_version"] = last_version + data["loaded_in_scene"] = subset_doc["_id"] in subsets_loaded_by_id + + # Sync server data + data.update( + self._get_last_repre_info(repre_info_by_version_id, + last_version["_id"])) + + item = Item() + item.update(data) + self.add_child(item, parent_item) + + index = self.index(item.row(), 0, parent_index) + self.set_version(index, last_version) + subset_counter = 0 for group_name, subset_docs_by_name in subset_docs_by_group.items(): parent_item = group_item_by_name[group_name]["item"] @@ -636,31 +727,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): _parent_index = parent_index for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - - data = copy.deepcopy(subset_doc) - data["subset"] = subset_name - data["asset"] = asset_docs_by_id[asset_id]["name"] - - last_version = last_versions_by_subset_id.get( - subset_doc["_id"] - ) - data["last_version"] = last_version - - # do not show subset without version - if not last_version: - continue - - data.update( - self._get_last_repre_info(repre_info_by_version_id, - last_version["_id"])) - - item = Item() - item.update(data) - self.add_child(item, _parent_item) - - index = self.index(item.row(), 0, _parent_index) - self.set_version(index, last_version) + _add_subset_item(subset_doc, + parent_item=_parent_item, + parent_index=_parent_index) for subset_name in sorted(subset_docs_without_group.keys()): subset_docs = subset_docs_without_group[subset_name] @@ -675,31 +744,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): subset_counter += 1 for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - - data = copy.deepcopy(subset_doc) - data["subset"] = subset_name - data["asset"] = asset_docs_by_id[asset_id]["name"] - - last_version = last_versions_by_subset_id.get( - subset_doc["_id"] - ) - data["last_version"] = last_version - - # do not show subset without version - if not last_version: - continue - - data.update( - self._get_last_repre_info(repre_info_by_version_id, - last_version["_id"])) - - item = Item() - item.update(data) - self.add_child(item, parent_item) - - index = self.index(item.row(), 0, parent_index) - self.set_version(index, last_version) + _add_subset_item(subset_doc, + parent_item=parent_item, + parent_index=parent_index) def data(self, index, role): if not index.isValid(): diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 597c35e89b..d37ce500e0 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -17,6 +17,7 @@ from openpype.client import ( get_thumbnail_id_from_source, get_thumbnail, ) +from openpype.client.operations import OperationsSession, REMOVED_VALUE from openpype.pipeline import HeroVersionType, Anatomy from openpype.pipeline.thumbnail import get_thumbnail_binary from openpype.pipeline.load import ( @@ -57,6 +58,7 @@ from .model import ( ITEM_ID_ROLE ) from . 
import lib +from .delegates import LoadedInSceneDelegate from openpype.tools.utils.constants import ( LOCAL_PROVIDER_ROLE, @@ -168,6 +170,7 @@ class SubsetWidget(QtWidgets.QWidget): ("duration", 60), ("handles", 55), ("step", 10), + ("loaded_in_scene", 25), ("repre_info", 65) ) @@ -233,6 +236,10 @@ class SubsetWidget(QtWidgets.QWidget): column = model.Columns.index("repre_info") view.setItemDelegateForColumn(column, avail_delegate) + loaded_in_scene_delegate = LoadedInSceneDelegate(view) + column = model.Columns.index("loaded_in_scene") + view.setItemDelegateForColumn(column, loaded_in_scene_delegate) + layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addLayout(top_bar_layout) @@ -614,26 +621,30 @@ class SubsetWidget(QtWidgets.QWidget): box.show() def group_subsets(self, name, asset_ids, items): - field = "data.subsetGroup" + subset_ids = { + item["_id"] + for item in items + if item.get("_id") + } + if not subset_ids: + return if name: - update = {"$set": {field: name}} self.echo("Group subsets to '%s'.." % name) else: - update = {"$unset": {field: ""}} self.echo("Ungroup subsets..") - subsets = list() - for item in items: - subsets.append(item["subset"]) + project_name = self.dbcon.active_project() + op_session = OperationsSession() + for subset_id in subset_ids: + op_session.update_entity( + project_name, + "subset", + subset_id, + {"data.subsetGroup": name or REMOVED_VALUE} + ) - for asset_id in asset_ids: - filtr = { - "type": "subset", - "parent": asset_id, - "name": {"$in": subsets}, - } - self.dbcon.update_many(filtr, update) + op_session.commit() def echo(self, message): print(message) @@ -1245,7 +1256,11 @@ class RepresentationWidget(QtWidgets.QWidget): repre_doc["parent"] for repre_doc in repre_docs ] - version_docs = get_versions(project_name, version_ids=version_ids) + version_docs = get_versions( + project_name, + version_ids=version_ids, + hero=True + ) version_docs_by_id = {} version_docs_by_subset_id = collections.defaultdict(list) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 3aaee75698..6f40140e5e 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -13,10 +13,8 @@ from openpype.client import ( get_assets, get_asset_ids_with_subsets, ) -from openpype.lib import ( - CURRENT_DOC_SCHEMAS, - PypeLogger, -) +from openpype.client.operations import CURRENT_ASSET_DOC_SCHEMA +from openpype.lib import Logger from .constants import ( IDENTIFIER_ROLE, @@ -203,7 +201,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): @property def log(self): if self._log is None: - self._log = PypeLogger.get_logger("ProjectManagerModel") + self._log = Logger.get_logger("ProjectManagerModel") return self._log @property @@ -1961,7 +1959,7 @@ class AssetItem(BaseItem): } schema_name = ( self._origin_asset_doc.get("schema") - or CURRENT_DOC_SCHEMAS["asset"] + or CURRENT_ASSET_DOC_SCHEMA ) doc = { diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 371d1ba2ef..4bc968347a 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -1,14 +1,13 @@ import re -from openpype.client import get_projects +from openpype.client import get_projects, create_project from .constants import ( NAME_ALLOWED_SYMBOLS, NAME_REGEX ) -from openpype.lib import ( - 
create_project, +from openpype.client.operations import ( PROJECT_NAME_ALLOWED_SYMBOLS, - PROJECT_NAME_REGEX + PROJECT_NAME_REGEX, ) from openpype.style import load_stylesheet from openpype.pipeline import AvalonMongoDB @@ -266,7 +265,7 @@ class CreateProjectDialog(QtWidgets.QDialog): project_name = self.project_name_input.text() project_code = self.project_code_input.text() library_project = self.library_project_input.isChecked() - create_project(project_name, project_code, library_project, self.dbcon) + create_project(project_name, project_code, library_project) self.done(1) diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index c6ae0ff352..3b2dea8ca3 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -1,5 +1,12 @@ from Qt import QtWidgets, QtCore, QtGui +from openpype import resources +from openpype.style import load_stylesheet +from openpype.widgets import PasswordDialog +from openpype.lib import is_admin_password_required, Logger +from openpype.pipeline import AvalonMongoDB +from openpype.pipeline.project_folders import create_project_folders + from . import ( ProjectModel, ProjectProxyFilter, @@ -13,17 +20,6 @@ from . import ( ) from .widgets import ConfirmProjectDeletion from .style import ResourceCache -from openpype.style import load_stylesheet -from openpype.lib import is_admin_password_required -from openpype.widgets import PasswordDialog -from openpype.pipeline import AvalonMongoDB - -from openpype import resources -from openpype.api import ( - get_project_basic_paths, - create_project_folders, - Logger -) class ProjectManagerWindow(QtWidgets.QWidget): @@ -259,12 +255,8 @@ class ProjectManagerWindow(QtWidgets.QWidget): qm.Yes | qm.No) if ans == qm.Yes: try: - # Get paths based on presets - basic_paths = get_project_basic_paths(project_name) - if not basic_paths: - pass # Invoking OpenPype API to create the project folders - create_project_folders(basic_paths, project_name) + create_project_folders(project_name) except Exception as exc: self.log.warning( "Cannot create starting folders: {}".format(exc), diff --git a/openpype/tools/publisher/__init__.py b/openpype/tools/publisher/__init__.py index a7b597eece..e69de29bb2 100644 --- a/openpype/tools/publisher/__init__.py +++ b/openpype/tools/publisher/__init__.py @@ -1,7 +0,0 @@ -from .app import show -from .window import PublisherWindow - -__all__ = ( - "show", - "PublisherWindow" -) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b48bb61386..d2d01e7921 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1,26 +1,36 @@ import os import copy -import inspect import logging import traceback import collections +import uuid +from abc import ABCMeta, abstractmethod, abstractproperty -import weakref -try: - from weakref import WeakMethod -except Exception: - from openpype.lib.python_2_comp import WeakMethod - +import six import pyblish.api -from openpype.client import get_assets +from openpype.client import ( + get_assets, + get_asset_by_id, + get_subsets, +) +from openpype.lib.events import EventSystem +from openpype.lib.attribute_definitions import ( + serialize_attr_defs, + deserialize_attr_defs, +) from openpype.pipeline import ( PublishValidationError, + KnownPublishError, registered_host, + legacy_io, +) +from openpype.pipeline.create import ( + CreateContext, + AutoCreator, + 
HiddenCreator, + Creator, ) -from openpype.pipeline.create import CreateContext - -from Qt import QtCore # Define constant for plugin orders offset PLUGIN_ORDER_OFFSET = 0.5 @@ -28,6 +38,7 @@ PLUGIN_ORDER_OFFSET = 0.5 class MainThreadItem: """Callback with args and kwargs.""" + def __init__(self, callback, *args, **kwargs): self.callback = callback self.args = args @@ -37,64 +48,9 @@ class MainThreadItem: self.callback(*self.args, **self.kwargs) -class MainThreadProcess(QtCore.QObject): - """Qt based main thread process executor. - - Has timer which controls each 50ms if there is new item to process. - - This approach gives ability to update UI meanwhile plugin is in progress. - """ - - count_timeout = 2 - - def __init__(self): - super(MainThreadProcess, self).__init__() - self._items_to_process = collections.deque() - - timer = QtCore.QTimer() - timer.setInterval(0) - - timer.timeout.connect(self._execute) - - self._timer = timer - self._switch_counter = self.count_timeout - - def process(self, func, *args, **kwargs): - item = MainThreadItem(func, *args, **kwargs) - self.add_item(item) - - def add_item(self, item): - self._items_to_process.append(item) - - def _execute(self): - if not self._items_to_process: - return - - if self._switch_counter > 0: - self._switch_counter -= 1 - return - - self._switch_counter = self.count_timeout - - item = self._items_to_process.popleft() - item.process() - - def start(self): - if not self._timer.isActive(): - self._timer.start() - - def stop(self): - if self._timer.isActive(): - self._timer.stop() - - def clear(self): - if self._timer.isActive(): - self._timer.stop() - self._items_to_process = collections.deque() - - class AssetDocsCache: """Cache asset documents for creation part.""" + projection = { "_id": True, "name": True, @@ -105,44 +61,100 @@ class AssetDocsCache: def __init__(self, controller): self._controller = controller self._asset_docs = None + self._asset_docs_hierarchy = None self._task_names_by_asset_name = {} - - @property - def dbcon(self): - return self._controller.dbcon + self._asset_docs_by_name = {} + self._full_asset_docs_by_name = {} def reset(self): self._asset_docs = None + self._asset_docs_hierarchy = None self._task_names_by_asset_name = {} + self._asset_docs_by_name = {} + self._full_asset_docs_by_name = {} def _query(self): - if self._asset_docs is None: - project_name = self.dbcon.active_project() - asset_docs = get_assets( - project_name, fields=self.projection.keys() - ) - task_names_by_asset_name = {} - for asset_doc in asset_docs: - asset_name = asset_doc["name"] - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_names_by_asset_name[asset_name] = list(asset_tasks.keys()) - self._asset_docs = asset_docs - self._task_names_by_asset_name = task_names_by_asset_name + if self._asset_docs is not None: + return + + project_name = self._controller.project_name + asset_docs = get_assets( + project_name, fields=self.projection.keys() + ) + asset_docs_by_name = {} + task_names_by_asset_name = {} + for asset_doc in asset_docs: + if "data" not in asset_doc: + asset_doc["data"] = {"tasks": {}, "visualParent": None} + elif "tasks" not in asset_doc["data"]: + asset_doc["data"]["tasks"] = {} + + asset_name = asset_doc["name"] + asset_tasks = asset_doc["data"]["tasks"] + task_names_by_asset_name[asset_name] = list(asset_tasks.keys()) + asset_docs_by_name[asset_name] = asset_doc + + self._asset_docs = asset_docs + self._asset_docs_by_name = asset_docs_by_name + self._task_names_by_asset_name = task_names_by_asset_name 
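[Editor's note] A small usage sketch of the AssetDocsCache defined here (illustrative only; 'controller' stands for any object exposing 'project_name'). The first getter call runs the MongoDB query, later calls are served from the cache, and every getter returns a deep copy so callers cannot corrupt the cached documents:

    cache = AssetDocsCache(controller)

    tasks = cache.get_task_names_by_asset_name()  # first access queries MongoDB
    tasks.clear()                                 # mutates only the deep copy
    tasks = cache.get_task_names_by_asset_name()  # cached data is still intact

    cache.reset()  # invalidate; the next getter triggers a fresh query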
def get_asset_docs(self): self._query() return copy.deepcopy(self._asset_docs) + def get_asset_hierarchy(self): + """Prepare asset documents into hierarchy. + + Convert ObjectId to string. Asset id is not used during whole + process of publisher but asset name is used rather. + + Returns: + Dict[Union[str, None]: Any]: Mapping of parent id to it's children. + Top level assets have parent id 'None'. + """ + + if self._asset_docs_hierarchy is None: + _queue = collections.deque(self.get_asset_docs()) + + output = collections.defaultdict(list) + while _queue: + asset_doc = _queue.popleft() + asset_doc["_id"] = str(asset_doc["_id"]) + parent_id = asset_doc["data"]["visualParent"] + if parent_id is not None: + parent_id = str(parent_id) + asset_doc["data"]["visualParent"] = parent_id + output[parent_id].append(asset_doc) + self._asset_docs_hierarchy = output + return copy.deepcopy(self._asset_docs_hierarchy) + def get_task_names_by_asset_name(self): self._query() return copy.deepcopy(self._task_names_by_asset_name) + def get_asset_by_name(self, asset_name): + self._query() + asset_doc = self._asset_docs_by_name.get(asset_name) + if asset_doc is None: + return None + return copy.deepcopy(asset_doc) + + def get_full_asset_by_name(self, asset_name): + self._query() + if asset_name not in self._full_asset_docs_by_name: + asset_doc = self._asset_docs_by_name.get(asset_name) + project_name = self._controller.project_name + full_asset_doc = get_asset_by_id(project_name, asset_doc["_id"]) + self._full_asset_docs_by_name[asset_name] = full_asset_doc + return copy.deepcopy(self._full_asset_docs_by_name[asset_name]) + class PublishReport: """Report for single publishing process. Report keeps current state of publishing and currently processed plugin. """ + def __init__(self, controller): self.controller = controller self._publish_discover_result = None @@ -223,13 +235,15 @@ class PublishReport: def add_result(self, result): """Handle result of one plugin and it's instance.""" + instance = result["instance"] instance_id = None if instance is not None: instance_id = instance.id self._current_plugin_data["instances_data"].append({ "id": instance_id, - "logs": self._extract_instance_log_items(result) + "logs": self._extract_instance_log_items(result), + "process_time": result["duration"] }) def add_action_result(self, action, result): @@ -279,7 +293,9 @@ class PublishReport: "plugins_data": plugins_data, "instances": instances_details, "context": self._extract_context_data(self._current_context), - "crashed_file_paths": crashed_file_paths + "crashed_file_paths": crashed_file_paths, + "id": str(uuid.uuid4()), + "report_version": "1.0.0" } def _extract_context_data(self, context): @@ -351,7 +367,1140 @@ class PublishReport: return output -class PublisherController: +class PublishPluginsProxy: + """Wrapper around publish plugin. + + Prepare mapping for publish plugins and actions. Also can create + serializable data for plugin actions so UI don't have to have access to + them. + + This object is created in process where publishing is actually running. + + Notes: + Actions have id but single action can be used on multiple plugins so + to run an action is needed combination of plugin and action. + + Args: + plugins [List[pyblish.api.Plugin]]: Discovered plugins that will be + processed. 
+ """ + + def __init__(self, plugins): + plugins_by_id = {} + actions_by_id = {} + action_ids_by_plugin_id = {} + for plugin in plugins: + plugin_id = plugin.id + plugins_by_id[plugin_id] = plugin + + action_ids = set() + action_ids_by_plugin_id[plugin_id] = action_ids + + actions = getattr(plugin, "actions", None) or [] + for action in actions: + action_id = action.id + action_ids.add(action_id) + actions_by_id[action_id] = action + + self._plugins_by_id = plugins_by_id + self._actions_by_id = actions_by_id + self._action_ids_by_plugin_id = action_ids_by_plugin_id + + def get_action(self, action_id): + return self._actions_by_id[action_id] + + def get_plugin(self, plugin_id): + return self._plugins_by_id[plugin_id] + + def get_plugin_id(self, plugin): + """Get id of plugin based on plugin object. + + It's used for validation errors report. + + Args: + plugin (pyblish.api.Plugin): Publish plugin for which id should be + returned. + + Returns: + str: Plugin id. + """ + + return plugin.id + + def get_plugin_action_items(self, plugin_id): + """Get plugin action items for plugin by it's id. + + Args: + plugin_id (str): Publish plugin id. + + Returns: + List[PublishPluginActionItem]: Items with information about publish + plugin actions. + """ + + return [ + self._create_action_item(self._actions_by_id[action_id], plugin_id) + for action_id in self._action_ids_by_plugin_id[plugin_id] + ] + + def _create_action_item(self, action, plugin_id): + label = action.label or action.__name__ + icon = getattr(action, "icon", None) + return PublishPluginActionItem( + action.id, + plugin_id, + action.active, + action.on, + label, + icon + ) + + +class PublishPluginActionItem: + """Representation of publish plugin action. + + Data driven object which is used as proxy for controller and UI. + + Args: + action_id (str): Action id. + plugin_id (str): Plugin id. + active (bool): Action is active. + on_filter (str): Actions have 'on' attribte which define when can be + action triggered (e.g. 'all', 'failed', ...). + label (str): Action's label. + icon (Union[str, None]) Action's icon. + """ + + def __init__(self, action_id, plugin_id, active, on_filter, label, icon): + self.action_id = action_id + self.plugin_id = plugin_id + self.active = active + self.on_filter = on_filter + self.label = label + self.icon = icon + + def to_data(self): + """Serialize object to dictionary. + + Returns: + Dict[str, Union[str,bool,None]]: Serialized object. + """ + + return { + "action_id": self.action_id, + "plugin_id": self.plugin_id, + "active": self.active, + "on_filter": self.on_filter, + "label": self.label, + "icon": self.icon + } + + @classmethod + def from_data(cls, data): + """Create object from data. + + Args: + data (Dict[str, Union[str,bool,None]]): Data used to recreate + object. + + Returns: + PublishPluginActionItem: Object created using data. + """ + + return cls(**data) + + +class ValidationErrorItem: + """Data driven validation error item. + + Prepared data container with information about validation error and it's + source plugin. + + Can be converted to raw data and recreated should be used for controller + and UI connection. + + Args: + instance_id (str): Id of pyblish instance to which is validation error + connected. + instance_label (str): Prepared instance label. + plugin_id (str): Id of pyblish Plugin which triggered the validation + error. Id is generated using 'PublishPluginsProxy'. 
+ """ + + def __init__( + self, + instance_id, + instance_label, + plugin_id, + context_validation, + title, + description, + detail, + ): + self.instance_id = instance_id + self.instance_label = instance_label + self.plugin_id = plugin_id + self.context_validation = context_validation + self.title = title + self.description = description + self.detail = detail + + def to_data(self): + """Serialize object to dictionary. + + Returns: + Dict[str, Union[str, bool, None]]: Serialized object data. + """ + + return { + "instance_id": self.instance_id, + "instance_label": self.instance_label, + "plugin_id": self.plugin_id, + "context_validation": self.context_validation, + "title": self.title, + "description": self.description, + "detail": self.detail, + } + + @classmethod + def from_result(cls, plugin_id, error, instance): + """Create new object based on resukt from controller. + + Returns: + ValidationErrorItem: New object with filled data. + """ + + instance_label = None + instance_id = None + if instance is not None: + instance_label = ( + instance.data.get("label") or instance.data.get("name") + ) + instance_id = instance.id + + return cls( + instance_id, + instance_label, + plugin_id, + instance is None, + error.title, + error.description, + error.detail, + ) + + @classmethod + def from_data(cls, data): + return cls(**data) + + +class PublishValidationErrorsReport: + """Publish validation errors report that can be parsed to raw data. + + Args: + error_items (List[ValidationErrorItem]): List of validation errors. + plugin_action_items (Dict[str, PublishPluginActionItem]): Action items + by plugin id. + """ + + def __init__(self, error_items, plugin_action_items): + self._error_items = error_items + self._plugin_action_items = plugin_action_items + + def __iter__(self): + for item in self._error_items: + yield item + + def group_items_by_title(self): + """Group errors by plugin and their titles. + + Items are grouped by plugin and title -> same title from different + plugin is different item. Items are ordered by plugin order. + + Returns: + List[Dict[str, Any]]: List where each item title, instance + information related to title and possible plugin actions. + """ + + ordered_plugin_ids = [] + error_items_by_plugin_id = collections.defaultdict(list) + for error_item in self._error_items: + plugin_id = error_item.plugin_id + if plugin_id not in ordered_plugin_ids: + ordered_plugin_ids.append(plugin_id) + error_items_by_plugin_id[plugin_id].append(error_item) + + grouped_error_items = [] + for plugin_id in ordered_plugin_ids: + plugin_action_items = self._plugin_action_items[plugin_id] + error_items = error_items_by_plugin_id[plugin_id] + + titles = [] + error_items_by_title = collections.defaultdict(list) + for error_item in error_items: + title = error_item.title + if title not in titles: + titles.append(error_item.title) + error_items_by_title[title].append(error_item) + + for title in titles: + grouped_error_items.append({ + "plugin_action_items": list(plugin_action_items), + "error_items": error_items_by_title[title], + "title": title + }) + return grouped_error_items + + def to_data(self): + """Serialize object to dictionary. + + Returns: + Dict[str, Any]: Serialized data. 
+ """ + + error_items = [ + item.to_data() + for item in self._error_items + ] + + plugin_action_items = { + plugin_id: [ + action_item.to_data() + for action_item in action_items + ] + for plugin_id, action_items in self._plugin_action_items.items() + } + + return { + "error_items": error_items, + "plugin_action_items": plugin_action_items + } + + @classmethod + def from_data(cls, data): + """Recreate object from data. + + Args: + data (dict[str, Any]): Data to recreate object. Can be created + using 'to_data' method. + + Returns: + PublishValidationErrorsReport: New object based on data. + """ + + error_items = [ + ValidationErrorItem.from_data(error_item) + for error_item in data["error_items"] + ] + plugin_action_items = [ + PublishPluginActionItem.from_data(action_item) + for action_item in data["plugin_action_items"] + ] + return cls(error_items, plugin_action_items) + + +class PublishValidationErrors: + """Object to keep track about validation errors by plugin.""" + + def __init__(self): + self._plugins_proxy = None + self._error_items = [] + self._plugin_action_items = {} + + def __bool__(self): + return self.has_errors + + @property + def has_errors(self): + """At least one error was added.""" + + return bool(self._error_items) + + def reset(self, plugins_proxy): + """Reset object to default state. + + Args: + plugins_proxy (PublishPluginsProxy): Proxy which store plugins, + actions by ids and create mapping of action ids by plugin ids. + """ + + self._plugins_proxy = plugins_proxy + self._error_items = [] + self._plugin_action_items = {} + + def create_report(self): + """Create report based on currently existing errors. + + Returns: + PublishValidationErrorsReport: Validation error report with all + error information and publish plugin action items. + """ + + return PublishValidationErrorsReport( + self._error_items, self._plugin_action_items + ) + + def add_error(self, plugin, error, instance): + """Add error from pyblish result. + + Args: + plugin (pyblish.api.Plugin): Plugin which triggered error. + error (ValidationException): Validation error. + instance (Union[pyblish.api.Instance, None]): Instance on which was + error raised or None if was raised on context. + """ + + # Make sure the cached report is cleared + plugin_id = self._plugins_proxy.get_plugin_id(plugin) + self._error_items.append( + ValidationErrorItem.from_result(plugin_id, error, instance) + ) + if plugin_id in self._plugin_action_items: + return + + plugin_actions = self._plugins_proxy.get_plugin_action_items( + plugin_id + ) + self._plugin_action_items[plugin_id] = plugin_actions + + +class CreatorType: + def __init__(self, name): + self.name = name + + def __str__(self): + return self.name + + def __eq__(self, other): + return self.name == str(other) + + +class CreatorTypes: + base = CreatorType("base") + auto = CreatorType("auto") + hidden = CreatorType("hidden") + artist = CreatorType("artist") + + @classmethod + def from_str(cls, value): + for creator_type in ( + cls.base, + cls.auto, + cls.hidden, + cls.artist + ): + if value == creator_type: + return creator_type + raise ValueError("Unknown type \"{}\"".format(str(value))) + + +class CreatorItem: + """Wrapper around Creator plugin. + + Object can be serialized and recreated. 
+ """ + + def __init__( + self, + identifier, + creator_type, + family, + label, + group_label, + icon, + instance_attributes_defs, + description, + detailed_description, + default_variant, + default_variants, + create_allow_context_change, + pre_create_attributes_defs + ): + self.identifier = identifier + self.creator_type = creator_type + self.family = family + self.label = label + self.group_label = group_label + self.icon = icon + self.description = description + self.detailed_description = detailed_description + self.default_variant = default_variant + self.default_variants = default_variants + self.create_allow_context_change = create_allow_context_change + self.instance_attributes_defs = instance_attributes_defs + self.pre_create_attributes_defs = pre_create_attributes_defs + + def get_instance_attr_defs(self): + return self.instance_attributes_defs + + def get_group_label(self): + return self.group_label + + @classmethod + def from_creator(cls, creator): + if isinstance(creator, AutoCreator): + creator_type = CreatorTypes.auto + elif isinstance(creator, HiddenCreator): + creator_type = CreatorTypes.hidden + elif isinstance(creator, Creator): + creator_type = CreatorTypes.artist + else: + creator_type = CreatorTypes.base + + description = None + detail_description = None + default_variant = None + default_variants = None + pre_create_attr_defs = None + create_allow_context_change = None + if creator_type is CreatorTypes.artist: + description = creator.get_description() + detail_description = creator.get_detail_description() + default_variant = creator.get_default_variant() + default_variants = creator.get_default_variants() + pre_create_attr_defs = creator.get_pre_create_attr_defs() + create_allow_context_change = creator.create_allow_context_change + + identifier = creator.identifier + return cls( + identifier, + creator_type, + creator.family, + creator.label or identifier, + creator.get_group_label(), + creator.get_icon(), + creator.get_instance_attr_defs(), + description, + detail_description, + default_variant, + default_variants, + create_allow_context_change, + pre_create_attr_defs + ) + + def to_data(self): + instance_attributes_defs = None + if self.instance_attributes_defs is not None: + instance_attributes_defs = serialize_attr_defs( + self.instance_attributes_defs + ) + + pre_create_attributes_defs = None + if self.pre_create_attributes_defs is not None: + instance_attributes_defs = serialize_attr_defs( + self.pre_create_attributes_defs + ) + + return { + "identifier": self.identifier, + "creator_type": str(self.creator_type), + "family": self.family, + "label": self.label, + "group_label": self.group_label, + "icon": self.icon, + "description": self.description, + "detailed_description": self.detailed_description, + "default_variant": self.default_variant, + "default_variants": self.default_variants, + "create_allow_context_change": self.create_allow_context_change, + "instance_attributes_defs": instance_attributes_defs, + "pre_create_attributes_defs": pre_create_attributes_defs, + } + + @classmethod + def from_data(cls, data): + instance_attributes_defs = data["instance_attributes_defs"] + if instance_attributes_defs is not None: + data["instance_attributes_defs"] = deserialize_attr_defs( + instance_attributes_defs + ) + + pre_create_attributes_defs = data["pre_create_attributes_defs"] + if pre_create_attributes_defs is not None: + data["pre_create_attributes_defs"] = deserialize_attr_defs( + pre_create_attributes_defs + ) + + data["creator_type"] = 
CreatorTypes.from_str(data["creator_type"]) + return cls(**data) + + +@six.add_metaclass(ABCMeta) +class AbstractPublisherController(object): + """Publisher tool controller. + + Define what must be implemented to be able use Publisher functionality. + + Goal is to have "data driven" controller that can be used to control UI + running in different process. That lead to some disadvantages like UI can't + access objects directly but by using wrappers that can be serialized. + """ + + @abstractproperty + def log(self): + """Controller's logger object. + + Returns: + logging.Logger: Logger object that can be used for logging. + """ + + pass + + @abstractproperty + def event_system(self): + """Inner event system for publisher controller.""" + + pass + + @abstractproperty + def project_name(self): + """Current context project name. + + Returns: + str: Name of project. + """ + + pass + + @abstractproperty + def current_asset_name(self): + """Current context asset name. + + Returns: + Union[str, None]: Name of asset. + """ + + pass + + @abstractproperty + def current_task_name(self): + """Current context task name. + + Returns: + Union[str, None]: Name of task. + """ + + pass + + @abstractproperty + def host_is_valid(self): + """Host is valid for creation part. + + Host must have implemented certain functionality to be able create + in Publisher tool. + + Returns: + bool: Host can handle creation of instances. + """ + + pass + + @abstractproperty + def instances(self): + """Collected/created instances. + + Returns: + List[CreatedInstance]: List of created instances. + """ + + pass + + @abstractmethod + def get_context_title(self): + """Get context title for artist shown at the top of main window. + + Returns: + Union[str, None]: Context title for window or None. In case of None + a warning is displayed (not nice for artists). + """ + + pass + + @abstractmethod + def get_asset_docs(self): + pass + + @abstractmethod + def get_asset_hierarchy(self): + pass + + @abstractmethod + def get_task_names_by_asset_names(self, asset_names): + pass + + @abstractmethod + def get_existing_subset_names(self, asset_name): + pass + + @abstractmethod + def reset(self): + """Reset whole controller. + + This should reset create context, publish context and all variables + that are related to it. + """ + + pass + + @abstractmethod + def get_creator_attribute_definitions(self, instances): + pass + + @abstractmethod + def get_publish_attribute_definitions(self, instances, include_context): + pass + + @abstractmethod + def get_creator_icon(self, identifier): + """Receive creator's icon by identifier. + + Args: + identifier (str): Creator's identifier. + + Returns: + Union[str, None]: Creator's icon string. + """ + + pass + + @abstractmethod + def get_subset_name( + self, + creator_identifier, + variant, + task_name, + asset_name, + instance_id=None + ): + """Get subset name based on passed data. + + Args: + creator_identifier (str): Identifier of creator which should be + responsible for subset name creation. + variant (str): Variant value from user's input. + task_name (str): Name of task for which is instance created. + asset_name (str): Name of asset for which is instance created. + instance_id (Union[str, None]): Existing instance id when subset + name is updated. + """ + + pass + + @abstractmethod + def create( + self, creator_identifier, subset_name, instance_data, options + ): + """Trigger creation by creator identifier. + + Should also trigger refresh of instanes. 
+ + Args: + creator_identifier (str): Identifier of Creator plugin. + subset_name (str): Calculated subset name. + instance_data (Dict[str, Any]): Base instance data with variant, + asset name and task name. + options (Dict[str, Any]): Data from pre-create attributes. + """ + + def save_changes(self): + """Save changes in create context.""" + + pass + + def remove_instances(self, instance_ids): + """Remove list of instances from create context.""" + # TODO expect instance ids + + pass + + @abstractproperty + def publish_has_finished(self): + """Has publishing finished. + + Returns: + bool: If publishing finished and all plugins were iterated. + """ + + pass + + @abstractproperty + def publish_is_running(self): + """Publishing is running right now. + + Returns: + bool: If publishing is in progress. + """ + + pass + + @abstractproperty + def publish_has_validated(self): + """Publish validation passed. + + Returns: + bool: If publishing passed last possible validation order. + """ + + pass + + @abstractproperty + def publish_has_crashed(self): + """Publishing crashed for any reason. + + Returns: + bool: Publishing crashed. + """ + + pass + + @abstractproperty + def publish_has_validation_errors(self): + """During validation happened at least one validation error. + + Returns: + bool: Validation error was raised during validation. + """ + + pass + + @abstractproperty + def publish_max_progress(self): + """Get maximum possible progress number. + + Returns: + int: Number that can be used as 100% of publish progress bar. + """ + + pass + + @abstractproperty + def publish_progress(self): + """Current progress number. + + Returns: + int: Current progress value from 0 to 'publish_max_progress'. + """ + + pass + + @abstractproperty + def publish_error_msg(self): + """Current error message which cause fail of publishing. + + Returns: + Union[str, None]: Message which will be showed to artist or + None. + """ + + pass + + @abstractmethod + def get_publish_report(self): + pass + + @abstractmethod + def get_validation_errors(self): + pass + + @abstractmethod + def publish(self): + """Trigger publishing without any order limitations.""" + + pass + + @abstractmethod + def validate(self): + """Trigger publishing which will stop after validation order.""" + + pass + + @abstractmethod + def stop_publish(self): + """Stop publishing can be also used to pause publishing. + + Pause of publishing is possible only if all plugins successfully + finished. + """ + + pass + + @abstractmethod + def run_action(self, plugin_id, action_id): + """Trigger pyblish action on a plugin. + + Args: + plugin_id (str): Id of publish plugin. + action_id (str): Id of publish action. + """ + + pass + + @abstractmethod + def set_comment(self, comment): + """Set comment on pyblish context. + + Set "comment" key on current pyblish.api.Context data. + + Args: + comment (str): Artist's comment. + """ + + pass + + @abstractmethod + def emit_card_message(self, message): + """Emit a card message which can have a lifetime. + + This is for UI purposes. Method can be extended to more arguments + in future e.g. different message timeout or type (color). + + Args: + message (str): Message that will be showed. + """ + + pass + + +class BasePublisherController(AbstractPublisherController): + """Implement common logic for controllers. + + Implement event system, logger and common attributes. Attributes are + triggering value changes so anyone can listen to their topics. + + Prepare implementation for creator items. 
Controller must implement only
+    their filling via '_collect_creator_items'.
+
+    All prepared implementation is based on calling the super '__init__'.
+    """
+
+    def __init__(self):
+        self._log = None
+        self._event_system = None
+
+        # Host is valid for creation
+        self._host_is_valid = False
+
+        # Any other exception that happened during publishing
+        self._publish_error_msg = None
+        # Publishing is in progress
+        self._publish_is_running = False
+        # Publishing is over validation order
+        self._publish_has_validated = False
+
+        self._publish_has_validation_errors = False
+        self._publish_has_crashed = False
+        # All publish plugins are processed
+        self._publish_has_finished = False
+        self._publish_max_progress = 0
+        self._publish_progress = 0
+
+        # Controller must use '_collect_creator_items' to fill the value
+        self._creator_items = None
+
+    @property
+    def log(self):
+        """Controller's logger object.
+
+        Returns:
+            logging.Logger: Logger object that can be used for logging.
+        """
+
+        if self._log is None:
+            self._log = logging.getLogger(self.__class__.__name__)
+        return self._log
+
+    @property
+    def event_system(self):
+        """Inner event system for publisher controller.
+
+        Is used for communication with UI. Event system is autocreated.
+
+        Known topics:
+            "show.detailed.help" - Detailed help requested (UI related).
+            "show.card.message" - Show card message request (UI related).
+            "instances.refresh.finished" - Instances are refreshed.
+            "plugins.refresh.finished" - Plugins refreshed.
+            "publish.reset.finished" - Publish context reset finished.
+            "controller.reset.finished" - Controller reset finished.
+            "publish.process.started" - Publishing started. Can be started
+                from paused state.
+            "publish.process.stopped" - Publishing stopped/paused process.
+            "publish.process.plugin.changed" - Plugin state has changed.
+            "publish.process.instance.changed" - Instance state has changed.
+            "publish.has_validated.changed" - Attr 'publish_has_validated'
+                changed.
+            "publish.is_running.changed" - Attr 'publish_is_running' changed.
+            "publish.has_crashed.changed" - Attr 'publish_has_crashed' changed.
+            "publish.publish_error.changed" - Attr 'publish_error' changed.
+            "publish.has_validation_errors.changed" - Attr
+                'has_validation_errors' changed.
+            "publish.max_progress.changed" - Attr 'publish_max_progress'
+                changed.
+            "publish.progress.changed" - Attr 'publish_progress' changed.
+            "publish.host_is_valid.changed" - Attr 'host_is_valid' changed.
+            "publish.finished.changed" - Attr 'publish_has_finished' changed.
+
+        Returns:
+            EventSystem: Event system which can trigger callbacks for topics.
+ """ + + if self._event_system is None: + self._event_system = EventSystem() + return self._event_system + + def _emit_event(self, topic, data=None): + if data is None: + data = {} + self.event_system.emit(topic, data, "controller") + + def _get_host_is_valid(self): + return self._host_is_valid + + def _set_host_is_valid(self, value): + if self._host_is_valid != value: + self._host_is_valid = value + self._emit_event("publish.host_is_valid.changed", {"value": value}) + + def _get_publish_has_finished(self): + return self._publish_has_finished + + def _set_publish_has_finished(self, value): + if self._publish_has_finished != value: + self._publish_has_finished = value + self._emit_event("publish.finished.changed", {"value": value}) + + def _get_publish_is_running(self): + return self._publish_is_running + + def _set_publish_is_running(self, value): + if self._publish_is_running != value: + self._publish_is_running = value + self._emit_event("publish.is_running.changed", {"value": value}) + + def _get_publish_has_validated(self): + return self._publish_has_validated + + def _set_publish_has_validated(self, value): + if self._publish_has_validated != value: + self._publish_has_validated = value + self._emit_event("publish.has_validated.changed", {"value": value}) + + def _get_publish_has_crashed(self): + return self._publish_has_crashed + + def _set_publish_has_crashed(self, value): + if self._publish_has_crashed != value: + self._publish_has_crashed = value + self._emit_event("publish.has_crashed.changed", {"value": value}) + + def _get_publish_has_validation_errors(self): + return self._publish_has_validation_errors + + def _set_publish_has_validation_errors(self, value): + if self._publish_has_validation_errors != value: + self._publish_has_validation_errors = value + self._emit_event( + "publish.has_validation_errors.changed", + {"value": value} + ) + + def _get_publish_max_progress(self): + return self._publish_max_progress + + def _set_publish_max_progress(self, value): + if self._publish_max_progress != value: + self._publish_max_progress = value + self._emit_event("publish.max_progress.changed", {"value": value}) + + def _get_publish_progress(self): + return self._publish_progress + + def _set_publish_progress(self, value): + if self._publish_progress != value: + self._publish_progress = value + self._emit_event("publish.progress.changed", {"value": value}) + + def _get_publish_error_msg(self): + return self._publish_error_msg + + def _set_publish_error_msg(self, value): + if self._publish_error_msg != value: + self._publish_error_msg = value + self._emit_event("publish.publish_error.changed", {"value": value}) + + host_is_valid = property( + _get_host_is_valid, _set_host_is_valid + ) + publish_has_finished = property( + _get_publish_has_finished, _set_publish_has_finished + ) + publish_is_running = property( + _get_publish_is_running, _set_publish_is_running + ) + publish_has_validated = property( + _get_publish_has_validated, _set_publish_has_validated + ) + publish_has_crashed = property( + _get_publish_has_crashed, _set_publish_has_crashed + ) + publish_has_validation_errors = property( + _get_publish_has_validation_errors, _set_publish_has_validation_errors + ) + publish_max_progress = property( + _get_publish_max_progress, _set_publish_max_progress + ) + publish_progress = property( + _get_publish_progress, _set_publish_progress + ) + publish_error_msg = property( + _get_publish_error_msg, _set_publish_error_msg + ) + + def _reset_attributes(self): + """Reset most of attributes 
that can be reset.""" + + # Reset creator items + self._creator_items = None + + self.publish_is_running = False + self.publish_has_validated = False + self.publish_has_crashed = False + self.publish_has_validation_errors = False + self.publish_has_finished = False + + self.publish_error_msg = None + self.publish_progress = 0 + + @property + def creator_items(self): + """Creators that can be shown in create dialog.""" + if self._creator_items is None: + self._creator_items = self._collect_creator_items() + return self._creator_items + + @abstractmethod + def _collect_creator_items(self): + """Receive CreatorItems to work with. + + Returns: + Dict[str, CreatorItem]: Creator items by their identifier. + """ + + pass + + def get_creator_icon(self, identifier): + """Receive icon for creator identifier. + + Args: + identifier (str): Creator's identifier for which the icon + should be returned. + """ + + creator_item = self.creator_items.get(identifier) + if creator_item is not None: + return creator_item.icon + return None + + +class PublisherController(BasePublisherController): """Middleware between UI, CreateContext and publish Context. Handle both creation and publishing parts. @@ -360,35 +1509,30 @@ class PublisherController: dbcon (AvalonMongoDB): Connection to mongo with context. headless (bool): Headless publishing. ATM not implemented or used. """ - def __init__(self, dbcon=None, headless=False): - self.log = logging.getLogger("PublisherController") - self.host = registered_host() - self.headless = headless - self.create_context = CreateContext( - self.host, dbcon, headless=headless, reset=False + _log = None + + def __init__(self, dbcon=None, headless=False): + super(PublisherController, self).__init__() + + self._host = registered_host() + self._headless = headless + + self._create_context = CreateContext( + self._host, dbcon, headless=headless, reset=False ) + self._publish_plugins_proxy = None + # pyblish.api.Context self._publish_context = None # Pyblish report self._publish_report = PublishReport(self) # Store exceptions of validation error - self._publish_validation_errors = [] - # Currently processing plugin errors - self._publish_current_plugin_validation_errors = None - # Any other exception that happened during publishing - self._publish_error = None - # Publishing is in progress - self._publish_is_running = False - # Publishing is over validation order - self._publish_validated = False + self._publish_validation_errors = PublishValidationErrors() + # Publishing should stop at validation stage self._publish_up_validation = False - # All publish plugins are processed - self._publish_finished = False - self._publish_max_progress = 0 - self._publish_progress = 0 # This information is not much important for controller but for widget # which can change (and set) the comment.
self._publish_comment_is_set = False @@ -400,23 +1544,9 @@ class PublisherController: pyblish.api.ValidatorOrder + PLUGIN_ORDER_OFFSET ) - # Qt based main thread processor - self._main_thread_processor = MainThreadProcess() # Plugin iterator self._main_thread_iter = None - # Variables where callbacks are stored - self._instances_refresh_callback_refs = set() - self._plugins_refresh_callback_refs = set() - - self._publish_reset_callback_refs = set() - self._publish_started_callback_refs = set() - self._publish_validated_callback_refs = set() - self._publish_stopped_callback_refs = set() - - self._publish_instance_changed_callback_refs = set() - self._publish_plugin_changed_callback_refs = set() - # State flags to prevent executing method which is already in progress self._resetting_plugins = False self._resetting_instances = False @@ -426,105 +1556,70 @@ class PublisherController: @property def project_name(self): - """Current project context.""" - return self.dbcon.Session["AVALON_PROJECT"] + """Current project context defined by host. + + Returns: + str: Project name. + """ + + if not hasattr(self._host, "get_current_context"): + return legacy_io.active_project() + + return self._host.get_current_context()["project_name"] @property - def dbcon(self): - """Pointer to AvalonMongoDB in creator context.""" - return self.create_context.dbcon + def current_asset_name(self): + """Current context asset name defined by host. + + Returns: + Union[str, None]: Asset name or None if asset is not set. + """ + + if not hasattr(self._host, "get_current_context"): + return legacy_io.Session["AVALON_ASSET"] + + return self._host.get_current_context()["asset_name"] + + @property + def current_task_name(self): + """Current context task name defined by host. + + Returns: + Union[str, None]: Task name or None if task is not set. 
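+ + Example (illustrative value; 'controller' stands for an instance of this class): + >>> controller.current_task_name + 'modeling'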
+ """ + + if not hasattr(self._host, "get_current_context"): + return legacy_io.Session["AVALON_TASK"] + + return self._host.get_current_context()["task_name"] @property def instances(self): """Current instances in create context.""" - return self.create_context.instances + return self._create_context.instances_by_id @property - def creators(self): + def _creators(self): """All creators loaded in create context.""" - return self.create_context.creators + + return self._create_context.creators @property - def manual_creators(self): - """Creators that can be shown in create dialog.""" - return self.create_context.manual_creators - - @property - def host_is_valid(self): - """Host is valid for creation.""" - return self.create_context.host_is_valid - - @property - def publish_plugins(self): + def _publish_plugins(self): """Publish plugins.""" - return self.create_context.publish_plugins - - @property - def plugins_with_defs(self): - """Publish plugins with possible attribute definitions.""" - return self.create_context.plugins_with_defs - - def _create_reference(self, callback): - if inspect.ismethod(callback): - ref = WeakMethod(callback) - elif callable(callback): - ref = weakref.ref(callback) - else: - raise TypeError("Expected function or method got {}".format( - str(type(callback)) - )) - return ref - - def add_instances_refresh_callback(self, callback): - """Callbacks triggered on instances refresh.""" - ref = self._create_reference(callback) - self._instances_refresh_callback_refs.add(ref) - - def add_plugins_refresh_callback(self, callback): - """Callbacks triggered on plugins refresh.""" - ref = self._create_reference(callback) - self._plugins_refresh_callback_refs.add(ref) + return self._create_context.publish_plugins # --- Publish specific callbacks --- - def add_publish_reset_callback(self, callback): - """Callbacks triggered on publishing reset.""" - ref = self._create_reference(callback) - self._publish_reset_callback_refs.add(ref) - - def add_publish_started_callback(self, callback): - """Callbacks triggered on publishing start.""" - ref = self._create_reference(callback) - self._publish_started_callback_refs.add(ref) - - def add_publish_validated_callback(self, callback): - """Callbacks triggered on passing last possible validation order.""" - ref = self._create_reference(callback) - self._publish_validated_callback_refs.add(ref) - - def add_instance_change_callback(self, callback): - """Callbacks triggered before next publish instance process.""" - ref = self._create_reference(callback) - self._publish_instance_changed_callback_refs.add(ref) - - def add_plugin_change_callback(self, callback): - """Callbacks triggered before next plugin processing.""" - ref = self._create_reference(callback) - self._publish_plugin_changed_callback_refs.add(ref) - - def add_publish_stopped_callback(self, callback): - """Callbacks triggered on publishing stop (any reason).""" - ref = self._create_reference(callback) - self._publish_stopped_callback_refs.add(ref) - def get_asset_docs(self): """Get asset documents from cache for whole project.""" return self._asset_docs_cache.get_asset_docs() def get_context_title(self): """Get context title for artist shown at the top of main window.""" + context_title = None - if hasattr(self.host, "get_context_title"): - context_title = self.host.get_context_title() + if hasattr(self._host, "get_context_title"): + context_title = self._host.get_context_title() if context_title is None: context_title = os.environ.get("AVALON_APP_NAME") @@ -535,14 +1630,8 @@ class 
PublisherController: def get_asset_hierarchy(self): """Prepare asset documents into hierarchy.""" - _queue = collections.deque(self.get_asset_docs()) - output = collections.defaultdict(list) - while _queue: - asset_doc = _queue.popleft() - parent_id = asset_doc["data"]["visualParent"] - output[parent_id].append(asset_doc) - return output + return self._asset_docs_cache.get_asset_hierarchy() def get_task_names_by_asset_names(self, asset_names): """Prepare task names by asset name.""" @@ -556,19 +1645,20 @@ class PublisherController: ) return result - def _trigger_callbacks(self, callbacks, *args, **kwargs): - """Helper method to trigger callbacks stored by their rerence.""" - # Trigger reset callbacks - to_remove = set() - for ref in callbacks: - callback = ref() - if callback: - callback(*args, **kwargs) - else: - to_remove.add(ref) + def get_existing_subset_names(self, asset_name): + project_name = self.project_name + asset_doc = self._asset_docs_cache.get_asset_by_name(asset_name) + if not asset_doc: + return None - for ref in to_remove: - callbacks.remove(ref) + asset_id = asset_doc["_id"] + subset_docs = get_subsets( + project_name, asset_ids=[asset_id], fields=["name"] + ) + return { + subset_doc["name"] + for subset_doc in subset_docs + } def reset(self): """Reset everything related to creation and publishing.""" @@ -577,14 +1667,26 @@ class PublisherController: self.save_changes() + self.host_is_valid = self._create_context.host_is_valid + + self._create_context.reset_preparation() + # Reset avalon context - self.create_context.reset_avalon_context() + self._create_context.reset_avalon_context() + + self._asset_docs_cache.reset() self._reset_plugins() # Publish part must be reset after plugins self._reset_publish() self._reset_instances() + self._create_context.reset_finalization() + + self._emit_event("controller.reset.finished") + + self.emit_card_message("Refreshed...") + def _reset_plugins(self): """Reset to initial state.""" if self._resetting_plugins: @@ -592,11 +1694,17 @@ class PublisherController: self._resetting_plugins = True - self.create_context.reset_plugins() + self._create_context.reset_plugins() self._resetting_plugins = False - self._trigger_callbacks(self._plugins_refresh_callback_refs) + self._emit_event("plugins.refresh.finished") + + def _collect_creator_items(self): + return { + identifier: CreatorItem.from_creator(creator) + for identifier, creator in self._create_context.creators.items() + } def _reset_instances(self): """Reset create instances.""" @@ -605,26 +1713,32 @@ class PublisherController: self._resetting_instances = True - self.create_context.reset_context_data() - with self.create_context.bulk_instances_collection(): - self.create_context.reset_instances() - self.create_context.execute_autocreators() + self._create_context.reset_context_data() + with self._create_context.bulk_instances_collection(): + self._create_context.reset_instances() + self._create_context.execute_autocreators() self._resetting_instances = False - self._trigger_callbacks(self._instances_refresh_callback_refs) + self._on_create_instance_change() + + def emit_card_message(self, message): + self._emit_event("show.card.message", {"message": message}) def get_creator_attribute_definitions(self, instances): """Collect creator attribute definitions for multiple instances. Args: - instances(list): List of created instances for + instances(List[CreatedInstance]): List of created instances for which should be attribute definitions returned.
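+ + Returns: + List[tuple]: Attribute definition with the instances that use + it and their values (shape is indicative, inferred from usage).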
""" + output = [] _attr_defs = {} for instance in instances: - for attr_def in instance.creator_attribute_defs: + creator_identifier = instance.creator_identifier + creator_item = self.creator_items[creator_identifier] + for attr_def in creator_item.instance_attributes_defs: found_idx = None for idx, _attr_def in _attr_defs.items(): if attr_def == _attr_def: @@ -652,9 +1766,10 @@ class PublisherController: which should be attribute definitions returned. include_context(bool): Add context specific attribute definitions. """ + _tmp_items = [] if include_context: - _tmp_items.append(self.create_context) + _tmp_items.append(self._create_context) for instance in instances: _tmp_items.append(instance) @@ -684,7 +1799,7 @@ class PublisherController: attr_values.append((item, value)) output = [] - for plugin in self.plugins_with_defs: + for plugin in self._create_context.plugins_with_defs: plugin_name = plugin.__name__ if plugin_name not in all_defs_by_plugin_name: continue @@ -695,86 +1810,89 @@ class PublisherController: )) return output - def get_icon_for_family(self, family): - """TODO rename to get creator icon.""" - creator = self.creators.get(family) - if creator is not None: - return creator.get_icon() - return None + def get_subset_name( + self, + creator_identifier, + variant, + task_name, + asset_name, + instance_id=None + ): + """Get subset name based on passed data. + + Args: + creator_identifier (str): Identifier of creator which should be + responsible for subset name creation. + variant (str): Variant value from user's input. + task_name (str): Name of task for which is instance created. + asset_name (str): Name of asset for which is instance created. + instance_id (Union[str, None]): Existing instance id when subset + name is updated. + """ + + creator = self._creators[creator_identifier] + project_name = self.project_name + asset_doc = self._asset_docs_cache.get_full_asset_by_name(asset_name) + instance = None + if instance_id: + instance = self.instances[instance_id] + + return creator.get_subset_name( + variant, task_name, asset_doc, project_name, instance=instance + ) def create( self, creator_identifier, subset_name, instance_data, options ): """Trigger creation and refresh of instances in UI.""" - creator = self.creators[creator_identifier] + creator = self._creators[creator_identifier] creator.create(subset_name, instance_data, options) - self._trigger_callbacks(self._instances_refresh_callback_refs) + self._on_create_instance_change() def save_changes(self): """Save changes happened during creation.""" - if self.create_context.host_is_valid: - self.create_context.save_changes() + if self._create_context.host_is_valid: + self._create_context.save_changes() - def remove_instances(self, instances): - """""" + def remove_instances(self, instance_ids): + """Remove instances based on instance ids. + + Args: + instance_ids (List[str]): List of instance ids to remove. + """ + # TODO expect instance ids instead of instances # QUESTION Expect that instances are really removed? In that case save # reset is not required and save changes too. 
self.save_changes() - self.create_context.remove_instances(instances) + self._remove_instances_from_context(instance_ids) - self._trigger_callbacks(self._instances_refresh_callback_refs) + self._on_create_instance_change() - # --- Publish specific implementations --- - @property - def publish_has_finished(self): - return self._publish_finished + def _remove_instances_from_context(self, instance_ids): + instances_by_id = self._create_context.instances_by_id + instances = [ + instances_by_id[instance_id] + for instance_id in instance_ids + ] + self._create_context.remove_instances(instances) - @property - def publish_is_running(self): - return self._publish_is_running - - @property - def publish_has_validated(self): - return self._publish_validated - - @property - def publish_has_crashed(self): - return bool(self._publish_error) - - @property - def publish_has_validation_errors(self): - return bool(self._publish_validation_errors) - - @property - def publish_max_progress(self): - return self._publish_max_progress - - @property - def publish_progress(self): - return self._publish_progress - - @property - def publish_comment_is_set(self): - return self._publish_comment_is_set - - def get_publish_crash_error(self): - return self._publish_error + def _on_create_instance_change(self): + self._emit_event("instances.refresh.finished") def get_publish_report(self): - return self._publish_report.get_report(self.publish_plugins) + return self._publish_report.get_report(self._publish_plugins) def get_validation_errors(self): - return self._publish_validation_errors + return self._publish_validation_errors.create_report() def _reset_publish(self): - self._publish_is_running = False - self._publish_validated = False + self._reset_attributes() + self._publish_up_validation = False - self._publish_finished = False self._publish_comment_is_set = False - self._main_thread_processor.clear() + self._main_thread_iter = self._publish_iterator() self._publish_context = pyblish.api.Context() # Make sure "comment" is set on publish context @@ -783,21 +1901,30 @@ class PublisherController: # - must not be used for changing CreatedInstances during publishing! # QUESTION # - pop the key after first collector using it would be safest option? - self._publish_context.data["create_context"] = self.create_context + self._publish_context.data["create_context"] = self._create_context - self._publish_report.reset(self._publish_context, self.create_context) - self._publish_validation_errors = [] - self._publish_current_plugin_validation_errors = None - self._publish_error = None + self._publish_plugins_proxy = PublishPluginsProxy( + self._publish_plugins ) - self._publish_max_progress = len(self.publish_plugins) - self._publish_progress = 0 + self._publish_report.reset(self._publish_context, self._create_context) + self._publish_validation_errors.reset(self._publish_plugins_proxy) - self._trigger_callbacks(self._publish_reset_callback_refs) + self.publish_max_progress = len(self._publish_plugins) + + self._emit_event("publish.reset.finished") def set_comment(self, comment): - self._publish_context.data["comment"] = comment - self._publish_comment_is_set = True + """Set comment from ui to pyblish context. + + This should always be called before publishing starts, but it should + take effect only once on the first publish start; the variable + '_publish_comment_is_set' keeps track of that.
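+ + Example (hypothetical UI call; 'controller' is an instance of this class): + >>> controller.set_comment("Initial publish of the rig")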
+ """ + + if not self._publish_comment_is_set: + self._publish_context.data["comment"] = comment + self._publish_comment_is_set = True def publish(self): """Run publishing.""" @@ -806,37 +1933,42 @@ class PublisherController: def validate(self): """Run publishing and stop after Validation.""" - if self._publish_validated: + if self.publish_has_validated: return self._publish_up_validation = True self._start_publish() def _start_publish(self): """Start or continue in publishing.""" - if self._publish_is_running: + if self.publish_is_running: return # Make sure changes are saved self.save_changes() - self._publish_is_running = True - self._trigger_callbacks(self._publish_started_callback_refs) - self._main_thread_processor.start() + self.publish_is_running = True + + self._emit_event("publish.process.started") + self._publish_next_process() def _stop_publish(self): """Stop or pause publishing.""" - self._publish_is_running = False - self._main_thread_processor.stop() - self._trigger_callbacks(self._publish_stopped_callback_refs) + self.publish_is_running = False + + self._emit_event("publish.process.stopped") def stop_publish(self): """Stop publishing process (any reason).""" - if self._publish_is_running: + + if self.publish_is_running: self._stop_publish() - def run_action(self, plugin, action): + def run_action(self, plugin_id, action_id): # TODO handle result in UI + plugin = self._publish_plugins_proxy.get_plugin(plugin_id) + action = self._publish_plugins_proxy.get_action(action_id) + result = pyblish.plugin.process( plugin, self._publish_context, None, action.id ) @@ -850,21 +1982,24 @@ class PublisherController: # There are validation errors and validation is passed # - can't do any progree if ( - self._publish_validated - and self._publish_validation_errors + self.publish_has_validated + and self.publish_has_validation_errors ): item = MainThreadItem(self.stop_publish) # Any unexpected error happened # - everything should stop - elif self._publish_error: + elif self.publish_has_crashed: item = MainThreadItem(self.stop_publish) # Everything is ok so try to get new processing item else: item = next(self._main_thread_iter) - self._main_thread_processor.add_item(item) + self._process_main_thread_item(item) + + def _process_main_thread_item(self, item): + item() def _publish_iterator(self): """Main logic center of publishing. @@ -879,32 +2014,24 @@ class PublisherController: QUESTION: Does validate button still make sense? """ - for idx, plugin in enumerate(self.publish_plugins): + for idx, plugin in enumerate(self._publish_plugins): self._publish_progress = idx - # Reset current plugin validations error - self._publish_current_plugin_validation_errors = None - # Check if plugin is over validation order - if not self._publish_validated: - self._publish_validated = ( + if not self.publish_has_validated: + self.publish_has_validated = ( plugin.order >= self._validation_order ) - # Trigger callbacks when validation stage is passed - if self._publish_validated: - self._trigger_callbacks( - self._publish_validated_callback_refs - ) # Stop if plugin is over validation order and process # should process up to validation. 
- if self._publish_up_validation and self._publish_validated: + if self._publish_up_validation and self.publish_has_validated: yield MainThreadItem(self.stop_publish) # Stop if validation is over and validation errors happened if ( - self._publish_validated - and self._publish_validation_errors + self.publish_has_validated + and self.publish_has_validation_errors ): yield MainThreadItem(self.stop_publish) @@ -912,9 +2039,14 @@ class PublisherController: self._publish_report.add_plugin_iter(plugin, self._publish_context) # Trigger callback that new plugin is going to be processed - self._trigger_callbacks( - self._publish_plugin_changed_callback_refs, plugin + plugin_label = plugin.__name__ + if hasattr(plugin, "label") and plugin.label: + plugin_label = plugin.label + self._emit_event( + "publish.process.plugin.changed", + {"plugin_label": plugin_label} ) + # Plugin is instance plugin if plugin.__instanceEnabled__: instances = pyblish.logic.instances_by_plugin( @@ -928,11 +2060,15 @@ class PublisherController: if instance.data.get("publish") is False: continue - self._trigger_callbacks( - self._publish_instance_changed_callback_refs, - self._publish_context, - instance + instance_label = ( + instance.data.get("label") + or instance.data["name"] ) + self._emit_event( + "publish.process.instance.changed", + {"instance_label": instance_label} + ) + yield MainThreadItem( self._process_and_continue, plugin, instance ) @@ -944,10 +2080,14 @@ class PublisherController: [plugin], families ) if plugins: - self._trigger_callbacks( - self._publish_instance_changed_callback_refs, - self._publish_context, - None + instance_label = ( + self._publish_context.data.get("label") + or self._publish_context.data.get("name") + or "Context" + ) + self._emit_event( + "publish.process.instance.changed", + {"instance_label": instance_label} ) yield MainThreadItem( self._process_and_continue, plugin, None @@ -956,24 +2096,17 @@ class PublisherController: self._publish_report.set_plugin_skipped() # Cleanup of publishing process - self._publish_finished = True - self._publish_progress = self._publish_max_progress + self.publish_has_finished = True + self.publish_progress = self.publish_max_progress yield MainThreadItem(self.stop_publish) def _add_validation_error(self, result): - if self._publish_current_plugin_validation_errors is None: - self._publish_current_plugin_validation_errors = { - "plugin": result["plugin"], - "errors": [] - } - self._publish_validation_errors.append( - self._publish_current_plugin_validation_errors - ) - - self._publish_current_plugin_validation_errors["errors"].append({ - "exception": result["error"], - "instance": result["instance"] - }) + self.publish_has_validation_errors = True + self._publish_validation_errors.add_error( + result["plugin"], + result["error"], + result["instance"] + ) def _process_and_continue(self, plugin, instance): result = pyblish.plugin.process( @@ -986,18 +2119,23 @@ class PublisherController: if exception: if ( isinstance(exception, PublishValidationError) - and not self._publish_validated + and not self.publish_has_validated ): self._add_validation_error(result) else: - self._publish_error = exception + if isinstance(exception, KnownPublishError): + msg = str(exception) + else: + msg = ( + "Something went wrong. Send report" + " to your supervisor or OpenPype." 
+ ) + self.publish_error_msg = msg + self.publish_has_crashed = True self._publish_next_process() - def reset_project_data_cache(self): - self._asset_docs_cache.reset() - def collect_families_from_instances(instances, only_active=False): """Collect all families for passed publish instances. diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py new file mode 100644 index 0000000000..56132a4046 --- /dev/null +++ b/openpype/tools/publisher/control_qt.py @@ -0,0 +1,405 @@ +import collections +from abc import abstractmethod, abstractproperty + +from Qt import QtCore + +from openpype.lib.events import Event +from openpype.pipeline.create import CreatedInstance + +from .control import ( + MainThreadItem, + PublisherController, + BasePublisherController, ) + + +class MainThreadProcess(QtCore.QObject): + """Qt based main thread process executor. + + Has a timer which periodically checks if there is a new item to process. + + This approach gives the ability to update the UI while a plugin is in + progress. + """ + + count_timeout = 2 + + def __init__(self): + super(MainThreadProcess, self).__init__() + self._items_to_process = collections.deque() + + timer = QtCore.QTimer() + timer.setInterval(0) + + timer.timeout.connect(self._execute) + + self._timer = timer + self._switch_counter = self.count_timeout + + def process(self, func, *args, **kwargs): + item = MainThreadItem(func, *args, **kwargs) + self.add_item(item) + + def add_item(self, item): + self._items_to_process.append(item) + + def _execute(self): + if not self._items_to_process: + return + + if self._switch_counter > 0: + self._switch_counter -= 1 + return + + self._switch_counter = self.count_timeout + + item = self._items_to_process.popleft() + item.process() + + def start(self): + if not self._timer.isActive(): + self._timer.start() + + def stop(self): + if self._timer.isActive(): + self._timer.stop() + + def clear(self): + if self._timer.isActive(): + self._timer.stop() + self._items_to_process = collections.deque() + + +class QtPublisherController(PublisherController): + def __init__(self, *args, **kwargs): + self._main_thread_processor = MainThreadProcess() + + super(QtPublisherController, self).__init__(*args, **kwargs) + + self.event_system.add_callback( + "publish.process.started", self._qt_on_publish_start + ) + self.event_system.add_callback( + "publish.process.stopped", self._qt_on_publish_stop + ) + + def _reset_publish(self): + super(QtPublisherController, self)._reset_publish() + self._main_thread_processor.clear() + + def _process_main_thread_item(self, item): + self._main_thread_processor.add_item(item) + + def _qt_on_publish_start(self): + self._main_thread_processor.start() + + def _qt_on_publish_stop(self): + self._main_thread_processor.stop() + + +class QtRemotePublishController(BasePublisherController): + """Abstract remote controller for Qt UI. + + This controller should be used in a process where the UI is running and + should listen and ask for data on the client side. + + All objects that are used during UI processing should be convertible on + the client side to JSON-serializable data and then recreated here. Keep + in mind that all changes made here should be sent back to the client + controller before critical actions. + + ATM this was not tested and will require some changes. All code written + here is based on a theoretical idea of how it could work.
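+ + Example (a minimal sketch; 'MyRemoteController' and 'self._transport' are illustrative assumptions, not part of this codebase): + >>> class MyRemoteController(QtRemotePublishController): + ... def _get_serialized_instances(self): + ... # Ask the client process for serialized instances + ... return self._transport.request("get_instances")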
+ """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self._created_instances = {} + + @abstractmethod + def _get_serialized_instances(self): + """Receive serialized instances from client process. + + Returns: + List[Dict[str, Any]]: Serialized instances. + """ + + pass + + def _on_create_instance_change(self): + serialized_instances = self._get_serialized_instances() + + created_instances = {} + for serialized_data in serialized_instances: + item = CreatedInstance.deserialize_on_remote( + serialized_data, + self._creator_items + ) + created_instances[item.id] = item + + self._created_instances = created_instances + self._emit_event("instances.refresh.finished") + + def remote_events_handler(self, event_data): + event = Event.from_data(event_data) + + # Topics that cause "replication" of controller changes + if event.topic == "publish.max_progress.changed": + self.publish_max_progress = event["value"] + return + + if event.topic == "publish.progress.changed": + self.publish_progress = event["value"] + return + + if event.topic == "publish.has_validated.changed": + self.publish_has_validated = event["value"] + return + + if event.topic == "publish.is_running.changed": + self.publish_is_running = event["value"] + return + + if event.topic == "publish.publish_error.changed": + self.publish_error_msg = event["value"] + return + + if event.topic == "publish.has_crashed.changed": + self.publish_has_crashed = event["value"] + return + + if event.topic == "publish.has_validation_errors.changed": + self.publish_has_validation_errors = event["value"] + return + + if event.topic == "publish.finished.changed": + self.publish_has_finished = event["value"] + return + + if event.topic == "publish.host_is_valid.changed": + self.host_is_valid = event["value"] + return + + # Topics that can be just passed by because are not affecting + # controller itself + # - "show.card.message" + # - "show.detailed.help" + # - "publish.reset.finished" + # - "instances.refresh.finished" + # - "plugins.refresh.finished" + # - "controller.reset.finished" + # - "publish.process.started" + # - "publish.process.stopped" + # - "publish.process.plugin.changed" + # - "publish.process.instance.changed" + self.event_system.emit_event(event) + + @abstractproperty + def project_name(self): + """Current context project name from client. + + Returns: + str: Name of project. + """ + + pass + + @abstractproperty + def current_asset_name(self): + """Current context asset name from client. + + Returns: + Union[str, None]: Name of asset. + """ + + pass + + @abstractproperty + def current_task_name(self): + """Current context task name from client. + + Returns: + Union[str, None]: Name of task. + """ + + pass + + @property + def instances(self): + """Collected/created instances. + + Returns: + List[CreatedInstance]: List of created instances. + """ + + return self._created_instances + + def get_context_title(self): + """Get context title for artist shown at the top of main window. + + Returns: + Union[str, None]: Context title for window or None. In case of None + a warning is displayed (not nice for artists). + """ + + pass + + def get_asset_docs(self): + pass + + def get_asset_hierarchy(self): + pass + + def get_task_names_by_asset_names(self, asset_names): + pass + + def get_existing_subset_names(self, asset_name): + pass + + @abstractmethod + def get_subset_name( + self, + creator_identifier, + variant, + task_name, + asset_name, + instance_id=None + ): + """Get subset name based on passed data. 
+ + Args: + creator_identifier (str): Identifier of creator which should be + responsible for subset name creation. + variant (str): Variant value from user's input. + task_name (str): Name of task for which is instance created. + asset_name (str): Name of asset for which is instance created. + instance_id (Union[str, None]): Existing instance id when subset + name is updated. + """ + + pass + + @abstractmethod + def create( + self, creator_identifier, subset_name, instance_data, options + ): + """Trigger creation by creator identifier. + + Should also trigger refresh of instances. + + Args: + creator_identifier (str): Identifier of Creator plugin. + subset_name (str): Calculated subset name. + instance_data (Dict[str, Any]): Base instance data with variant, + asset name and task name. + options (Dict[str, Any]): Data from pre-create attributes. + """ + + pass + + def _get_instance_changes_for_client(self): + """Pre-implemented method to receive instance changes for client.""" + + created_instance_changes = {} + for instance_id, instance in self._created_instances.items(): + created_instance_changes[instance_id] = ( + instance.remote_changes() + ) + return created_instance_changes + + @abstractmethod + def _send_instance_changes_to_client(self): + instance_changes = self._get_instance_changes_for_client() + # Implement to send 'instance_changes' value to client + + @abstractmethod + def save_changes(self): + """Save changes happened during creation.""" + + self._send_instance_changes_to_client() + + @abstractmethod + def remove_instances(self, instance_ids): + """Remove list of instances from create context.""" + # TODO add Args: + + pass + + @abstractmethod + def get_publish_report(self): + pass + + @abstractmethod + def get_validation_errors(self): + pass + + @abstractmethod + def reset(self): + """Reset whole controller. + + This should reset create context, publish context and all variables + that are related to it. + """ + + self._send_instance_changes_to_client() + pass + + @abstractmethod + def publish(self): + """Trigger publishing without any order limitations.""" + + self._send_instance_changes_to_client() + pass + + @abstractmethod + def validate(self): + """Trigger publishing which will stop after validation order.""" + + self._send_instance_changes_to_client() + pass + + @abstractmethod + def stop_publish(self): + """Stop publishing; can also be used to pause publishing. + + Pausing publishing is possible only if all plugins successfully + finished. + """ + + pass + + @abstractmethod + def run_action(self, plugin_id, action_id): + """Trigger pyblish action on a plugin. + + Args: + plugin_id (str): Id of publish plugin. + action_id (str): Id of publish action. + """ + + pass + + @abstractmethod + def set_comment(self, comment): + """Set comment on pyblish context. + + Set "comment" key on current pyblish.api.Context data. + + Args: + comment (str): Artist's comment. + """ + + pass + + @abstractmethod + def emit_card_message(self, message): + """Emit a card message which can have a lifetime. + + This is for UI purposes. The method can be extended with more + arguments in future, e.g. a different message timeout or type (color). + + Args: + message (str): Message that will be shown.
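+ + Example (illustrative): + >>> controller.emit_card_message("Saved changes")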
+ """ + + pass diff --git a/openpype/tools/publisher/publish_report_viewer/__init__.py b/openpype/tools/publisher/publish_report_viewer/__init__.py index ce1cc3729c..bf77a6d30b 100644 --- a/openpype/tools/publisher/publish_report_viewer/__init__.py +++ b/openpype/tools/publisher/publish_report_viewer/__init__.py @@ -1,3 +1,5 @@ +from Qt import QtWidgets + from .report_items import ( PublishReport ) @@ -16,4 +18,13 @@ __all__ = ( "PublishReportViewerWidget", "PublishReportViewerWindow", + + "main", ) + + +def main(): + app = QtWidgets.QApplication([]) + window = PublishReportViewerWindow() + window.show() + return app.exec_() diff --git a/openpype/tools/publisher/publish_report_viewer/report_items.py b/openpype/tools/publisher/publish_report_viewer/report_items.py index 8a01569723..206f999bac 100644 --- a/openpype/tools/publisher/publish_report_viewer/report_items.py +++ b/openpype/tools/publisher/publish_report_viewer/report_items.py @@ -79,7 +79,7 @@ class PublishReport: context_data = data["context"] context_data["name"] = "context" - context_data["label"] = context_data["label"] or "Context" + context_data["label"] = context_data.get("label") or "Context" logs = [] plugins_items_by_id = {} diff --git a/openpype/tools/publisher/publish_report_viewer/widgets.py b/openpype/tools/publisher/publish_report_viewer/widgets.py index 61eb814a56..ff388fb277 100644 --- a/openpype/tools/publisher/publish_report_viewer/widgets.py +++ b/openpype/tools/publisher/publish_report_viewer/widgets.py @@ -139,6 +139,9 @@ class PluginLoadReportWidget(QtWidgets.QWidget): class ZoomPlainText(QtWidgets.QPlainTextEdit): + min_point_size = 1.0 + max_point_size = 200.0 + def __init__(self, *args, **kwargs): super(ZoomPlainText, self).__init__(*args, **kwargs) @@ -148,12 +151,12 @@ class ZoomPlainText(QtWidgets.QPlainTextEdit): anim_timer.timeout.connect(self._scaling_callback) self._anim_timer = anim_timer - self._zoom_enabled = False self._scheduled_scalings = 0 self._point_size = None def wheelEvent(self, event): - if not self._zoom_enabled: + modifiers = QtWidgets.QApplication.keyboardModifiers() + if modifiers != QtCore.Qt.ControlModifier: super(ZoomPlainText, self).wheelEvent(event) return @@ -172,33 +175,40 @@ class ZoomPlainText(QtWidgets.QPlainTextEdit): factor = 1.0 + (self._scheduled_scalings / 300) font = self.font() + if self._point_size is None: - self._point_size = font.pointSizeF() + point_size = font.pointSizeF() + else: + point_size = self._point_size - self._point_size *= factor - if self._point_size < 1: - self._point_size = 1.0 + point_size *= factor + min_hit = False + max_hit = False + if point_size < self.min_point_size: + point_size = self.min_point_size + min_hit = True + elif point_size > self.max_point_size: + point_size = self.max_point_size + max_hit = True - font.setPointSizeF(self._point_size) + self._point_size = point_size + + font.setPointSizeF(point_size) # Using 'self.setFont(font)' would not be propagated when stylesheets # are applied on this widget self.setStyleSheet("font-size: {}pt".format(font.pointSize())) - if self._scheduled_scalings > 0: + if ( + (max_hit and self._scheduled_scalings > 0) + or (min_hit and self._scheduled_scalings < 0) + ): + self._scheduled_scalings = 0 + + elif self._scheduled_scalings > 0: self._scheduled_scalings -= 1 else: self._scheduled_scalings += 1 - def keyPressEvent(self, event): - if event.key() == QtCore.Qt.Key_Control: - self._zoom_enabled = True - super(ZoomPlainText, self).keyPressEvent(event) - - def keyReleaseEvent(self, event): - if 
event.key() == QtCore.Qt.Key_Control: - self._zoom_enabled = False - super(ZoomPlainText, self).keyReleaseEvent(event) - class DetailsWidget(QtWidgets.QWidget): def __init__(self, parent): @@ -331,7 +341,7 @@ class DetailsPopup(QtWidgets.QDialog): self.closed.emit() -class PublishReportViewerWidget(QtWidgets.QWidget): +class PublishReportViewerWidget(QtWidgets.QFrame): def __init__(self, parent=None): super(PublishReportViewerWidget, self).__init__(parent) diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index 55afc349ff..a02c69d5e0 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -3,35 +3,20 @@ from .icons import ( get_pixmap, get_icon ) -from .border_label_widget import ( - BorderedLabelWidget -) from .widgets import ( - SubsetAttributesWidget, - StopBtn, ResetBtn, ValidateBtn, PublishBtn, - - CreateInstanceBtn, - RemoveInstanceBtn, - ChangeViewBtn ) -from .publish_widget import ( - PublishFrame -) -from .create_dialog import ( - CreateDialog -) - -from .card_view_widgets import ( - InstanceCardView -) - -from .list_view_widgets import ( - InstanceListView +from .help_widget import ( + HelpButton, + HelpDialog, ) +from .publish_frame import PublishFrame +from .tabs_widget import PublisherTabsWidget +from .overview_widget import OverviewWidget +from .validations_widget import ValidationsWidget __all__ = ( @@ -39,22 +24,17 @@ __all__ = ( "get_pixmap", "get_icon", - "SubsetAttributesWidget", - "BorderedLabelWidget", - "StopBtn", "ResetBtn", "ValidateBtn", "PublishBtn", - "CreateInstanceBtn", - "RemoveInstanceBtn", - "ChangeViewBtn", + "HelpButton", + "HelpDialog", "PublishFrame", - "CreateDialog", - - "InstanceCardView", - "InstanceListView", + "PublisherTabsWidget", + "OverviewWidget", + "ValidationsWidget", ) diff --git a/openpype/tools/publisher/widgets/assets_widget.py b/openpype/tools/publisher/widgets/assets_widget.py index 46fdcc6526..996c9029d4 100644 --- a/openpype/tools/publisher/widgets/assets_widget.py +++ b/openpype/tools/publisher/widgets/assets_widget.py @@ -1,6 +1,7 @@ import collections from Qt import QtWidgets, QtCore, QtGui + from openpype.tools.utils import ( PlaceholderLineEdit, RecursiveSortFilterProxyModel, @@ -13,18 +14,17 @@ from openpype.tools.utils.assets_widget import ( ) -class CreateDialogAssetsWidget(SingleSelectAssetsWidget): +class CreateWidgetAssetsWidget(SingleSelectAssetsWidget): current_context_required = QtCore.Signal() header_height_changed = QtCore.Signal(int) def __init__(self, controller, parent): self._controller = controller - super(CreateDialogAssetsWidget, self).__init__(None, parent) + super(CreateWidgetAssetsWidget, self).__init__(None, parent) self.set_refresh_btn_visibility(False) self.set_current_asset_btn_visibility(False) - self._current_asset_name = None self._last_selection = None self._enabled = None @@ -42,11 +42,11 @@ class CreateDialogAssetsWidget(SingleSelectAssetsWidget): self.header_height_changed.emit(height) def resizeEvent(self, event): - super(CreateDialogAssetsWidget, self).resizeEvent(event) + super(CreateWidgetAssetsWidget, self).resizeEvent(event) self._check_header_height() def showEvent(self, event): - super(CreateDialogAssetsWidget, self).showEvent(event) + super(CreateWidgetAssetsWidget, self).showEvent(event) self._check_header_height() def _on_current_asset_click(self): @@ -63,19 +63,19 @@ class CreateDialogAssetsWidget(SingleSelectAssetsWidget): self.select_asset(self._last_selection) def 
_select_indexes(self, *args, **kwargs): - super(CreateDialogAssetsWidget, self)._select_indexes(*args, **kwargs) + super(CreateWidgetAssetsWidget, self)._select_indexes(*args, **kwargs) if self._enabled: return self._last_selection = self.get_selected_asset_id() self._clear_selection() - def set_current_asset_name(self, asset_name): - self._current_asset_name = asset_name + def update_current_asset(self): # Hide set current asset if there is no one - self.set_current_asset_btn_visibility(asset_name is not None) + asset_name = self._get_current_session_asset() + self.set_current_asset_btn_visibility(bool(asset_name)) def _get_current_session_asset(self): - return self._current_asset_name + return self._controller.current_asset_name def _create_source_model(self): return AssetsHierarchyModel(self._controller) @@ -164,6 +164,16 @@ class AssetsHierarchyModel(QtGui.QStandardItemModel): return item_name in self._items_by_name +class AssetDialogView(QtWidgets.QTreeView): + double_clicked = QtCore.Signal(QtCore.QModelIndex) + + def mouseDoubleClickEvent(self, event): + index = self.indexAt(event.pos()) + if index.isValid(): + self.double_clicked.emit(index) + event.accept() + + class AssetsDialog(QtWidgets.QDialog): """Dialog to select asset for a context of instance.""" @@ -179,7 +189,7 @@ class AssetsDialog(QtWidgets.QDialog): filter_input = PlaceholderLineEdit(self) filter_input.setPlaceholderText("Filter assets..") - asset_view = QtWidgets.QTreeView(self) + asset_view = AssetDialogView(self) asset_view.setModel(proxy_model) asset_view.setHeaderHidden(True) asset_view.setFrameShape(QtWidgets.QFrame.NoFrame) @@ -201,6 +211,7 @@ class AssetsDialog(QtWidgets.QDialog): layout.addWidget(asset_view, 1) layout.addLayout(btns_layout, 0) + asset_view.double_clicked.connect(self._on_ok_clicked) filter_input.textChanged.connect(self._on_filter_change) ok_btn.clicked.connect(self._on_ok_clicked) cancel_btn.clicked.connect(self._on_cancel_clicked) @@ -275,7 +286,7 @@ class AssetsDialog(QtWidgets.QDialog): index = self._asset_view.currentIndex() asset_name = None if index.isValid(): - asset_name = index.data(QtCore.Qt.DisplayRole) + asset_name = index.data(ASSET_NAME_ROLE) self._selected_asset = asset_name self.done(1) diff --git a/openpype/tools/publisher/widgets/border_label_widget.py b/openpype/tools/publisher/widgets/border_label_widget.py index 696a9050b8..8e09dd817e 100644 --- a/openpype/tools/publisher/widgets/border_label_widget.py +++ b/openpype/tools/publisher/widgets/border_label_widget.py @@ -158,8 +158,7 @@ class BorderedLabelWidget(QtWidgets.QFrame): """ def __init__(self, label, parent): super(BorderedLabelWidget, self).__init__(parent) - colors_data = get_objected_colors() - color_value = colors_data.get("border") + color_value = get_objected_colors("border") color = None if color_value: color = color_value.get_qcolor() diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index fa391f4ba0..5daf8059b0 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -41,9 +41,26 @@ from ..constants import ( ) +class SelectionType: + def __init__(self, name): + self.name = name + + def __eq__(self, other): + if isinstance(other, SelectionType): + other = other.name + return self.name == other + + +class SelectionTypes: + clear = SelectionType("clear") + extend = SelectionType("extend") + extend_to = SelectionType("extend_to") + + class 
GroupWidget(QtWidgets.QWidget): """Widget wrapping instances under group.""" - selected = QtCore.Signal(str, str) + + selected = QtCore.Signal(str, str, SelectionType) active_changed = QtCore.Signal() removed_selected = QtCore.Signal() @@ -72,21 +89,73 @@ class GroupWidget(QtWidgets.QWidget): self._group_icons = group_icons self._widgets_by_id = {} + self._ordered_instance_ids = [] self._label_widget = label_widget self._content_layout = layout + @property + def group_name(self): + """Group which the widget represents. + + Returns: + str: Name of group. + """ + + return self._group + + def get_selected_instance_ids(self): + """Selected instance ids. + + Returns: + Set[str]: Instance ids that are selected. + """ + + return { + instance_id + for instance_id, widget in self._widgets_by_id.items() + if widget.is_selected + } + + def get_selected_widgets(self): + """Access to widgets marked as selected. + + Returns: + List[InstanceCardWidget]: Instance widgets that are selected. + """ + + return [ + widget + for instance_id, widget in self._widgets_by_id.items() + if widget.is_selected + ] + + def get_ordered_widgets(self): + """Get instance widgets in the order they are shown in the UI. + + Returns: + List[InstanceCardWidget]: Instance widgets. + """ + + return [ + self._widgets_by_id[instance_id] + for instance_id in self._ordered_instance_ids + ] + def get_widget_by_instance_id(self, instance_id): """Get instance widget by its id.""" + return self._widgets_by_id.get(instance_id) def update_instance_values(self): """Trigger update on instance widgets.""" + for widget in self._widgets_by_id.values(): widget.update_instance_values() def confirm_remove_instance_id(self, instance_id): """Delete widget by instance id.""" + widget = self._widgets_by_id.pop(instance_id) widget.setVisible(False) self._content_layout.removeWidget(widget) @@ -123,6 +192,7 @@ class GroupWidget(QtWidgets.QWidget): # Sort instances by subset name sorted_subset_names = list(sorted(instances_by_subset_name.keys())) + # Add new instances to widget widget_idx = 1 for subset_names in sorted_subset_names: @@ -135,17 +205,30 @@ class GroupWidget(QtWidgets.QWidget): widget = InstanceCardWidget( instance, group_icon, self ) - widget.selected.connect(self.selected) + widget.selected.connect(self._on_widget_selection) widget.active_changed.connect(self.active_changed) self._widgets_by_id[instance.id] = widget self._content_layout.insertWidget(widget_idx, widget) widget_idx += 1 + ordered_instance_ids = [] + for idx in range(self._content_layout.count()): + if idx > 0: + item = self._content_layout.itemAt(idx) + widget = item.widget() + if widget is not None: + ordered_instance_ids.append(widget.id) + + self._ordered_instance_ids = ordered_instance_ids + + def _on_widget_selection(self, instance_id, group_id, selection_type): + self.selected.emit(instance_id, group_id, selection_type) + class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" + selected = QtCore.Signal(str, str, SelectionType) # Group identifier of card # - this must be set because it is sent when mouse is released with card id _group_identifier = None @@ -157,6 +240,12 @@ class CardWidget(BaseClickableFrame): self._selected = False self._id = None + @property + def id(self): + """Id of card.""" + + return self._id + @property def is_selected(self): """Is card selected.""" @@ -173,7 +262,16 @@ class CardWidget(BaseClickableFrame): def _mouse_release_callback(self): """Trigger selected signal.""" - self.selected.emit(self._id,
self._group_identifier) + + modifiers = QtWidgets.QApplication.keyboardModifiers() + selection_type = SelectionTypes.clear + if bool(modifiers & QtCore.Qt.ShiftModifier): + selection_type = SelectionTypes.extend_to + + elif bool(modifiers & QtCore.Qt.ControlModifier): + selection_type = SelectionTypes.extend + + self.selected.emit(self._id, self._group_identifier, selection_type) class ContextCardWidget(CardWidget): @@ -351,10 +449,11 @@ class InstanceCardView(AbstractInstanceView): Wrapper of all widgets in card view. """ + def __init__(self, controller, parent): super(InstanceCardView, self).__init__(parent) - self.controller = controller + self._controller = controller scroll_area = QtWidgets.QScrollArea(self) scroll_area.setWidgetResizable(True) @@ -381,11 +480,12 @@ class InstanceCardView(AbstractInstanceView): self._content_layout = content_layout self._content_widget = content_widget - self._widgets_by_group = {} self._context_widget = None + self._widgets_by_group = {} + self._ordered_groups = [] - self._selected_group = None - self._selected_instance_id = None + self._explicitly_selected_instance_ids = [] + self._explicitly_selected_groups = [] self.setSizePolicy( QtWidgets.QSizePolicy.Minimum, @@ -405,21 +505,30 @@ class InstanceCardView(AbstractInstanceView): result.setWidth(width) return result - def _get_selected_widget(self): - if self._selected_instance_id == CONTEXT_ID: - return self._context_widget + def _get_selected_widgets(self): + output = [] + if ( + self._context_widget is not None + and self._context_widget.is_selected + ): + output.append(self._context_widget) - group_widget = self._widgets_by_group.get( - self._selected_group - ) - if group_widget is not None: - widget = group_widget.get_widget_by_instance_id( - self._selected_instance_id - ) - if widget is not None: - return widget + for group_widget in self._widgets_by_group.values(): + for widget in group_widget.get_selected_widgets(): + output.append(widget) + return output - return None + def _get_selected_instance_ids(self): + output = [] + if ( + self._context_widget is not None + and self._context_widget.is_selected + ): + output.append(CONTEXT_ID) + + for group_widget in self._widgets_by_group.values(): + output.extend(group_widget.get_selected_instance_ids()) + return output def refresh(self): """Refresh instances in view based on CreatedContext.""" @@ -435,12 +544,10 @@ class InstanceCardView(AbstractInstanceView): self.selection_changed.emit() self._content_layout.insertWidget(0, widget) - self.select_item(CONTEXT_ID, None) - # Prepare instances by group and identifiers by group instances_by_group = collections.defaultdict(list) identifiers_by_group = collections.defaultdict(set) - for instance in self.controller.instances: + for instance in self._controller.instances.values(): group_name = instance.group_label instances_by_group[group_name].append(instance) identifiers_by_group[group_name].add( @@ -452,15 +559,17 @@ class InstanceCardView(AbstractInstanceView): if group_name in instances_by_group: continue - if group_name == self._selected_group: - self._on_remove_selected() widget = self._widgets_by_group.pop(group_name) widget.setVisible(False) self._content_layout.removeWidget(widget) widget.deleteLater() + if group_name in self._explicitly_selected_groups: + self._explicitly_selected_groups.remove(group_name) + # Sort groups sorted_group_names = list(sorted(instances_by_group.keys())) + # Keep track of widget indexes # - we start with 1 because Context item as at the top widget_idx = 1 @@ -469,7 
+578,7 @@ class InstanceCardView(AbstractInstanceView): group_widget = self._widgets_by_group[group_name] else: group_icons = { - idenfier: self.controller.get_icon_for_family(idenfier) + idenfier: self._controller.get_creator_icon(idenfier) for idenfier in identifiers_by_group[group_name] } @@ -478,9 +587,6 @@ class InstanceCardView(AbstractInstanceView): ) group_widget.active_changed.connect(self._on_active_changed) group_widget.selected.connect(self._on_widget_selection) - group_widget.removed_selected.connect( - self._on_remove_selected - ) self._content_layout.insertWidget(widget_idx, group_widget) self._widgets_by_group[group_name] = group_widget @@ -489,6 +595,16 @@ class InstanceCardView(AbstractInstanceView): instances_by_group[group_name] ) + ordered_group_names = [""] + for idx in range(self._content_layout.count()): + if idx > 0: + item = self._content_layout.itemAt(idx) + group_widget = item.widget() + if group_widget is not None: + ordered_group_names.append(group_widget.group_name) + + self._ordered_groups = ordered_group_names + def refresh_instance_states(self): """Trigger update of instances on group widgets.""" for widget in self._widgets_by_group.values(): @@ -497,10 +613,7 @@ class InstanceCardView(AbstractInstanceView): def _on_active_changed(self): self.active_changed.emit() - def _on_widget_selection(self, instance_id, group_name): - self.select_item(instance_id, group_name) - - def select_item(self, instance_id, group_name): + def _on_widget_selection(self, instance_id, group_name, selection_type): """Select specific item by instance id. Pass `CONTEXT_ID` as instance id and empty string as group to select @@ -512,34 +625,318 @@ class InstanceCardView(AbstractInstanceView): group_widget = self._widgets_by_group[group_name] new_widget = group_widget.get_widget_by_instance_id(instance_id) - selected_widget = self._get_selected_widget() - if new_widget is selected_widget: - return - - if selected_widget is not None: - selected_widget.set_selected(False) - - self._selected_instance_id = instance_id - self._selected_group = group_name - if new_widget is not None: - new_widget.set_selected(True) + if selection_type is SelectionTypes.clear: + self._select_item_clear(instance_id, group_name, new_widget) + elif selection_type is SelectionTypes.extend: + self._select_item_extend(instance_id, group_name, new_widget) + elif selection_type is SelectionTypes.extend_to: + self._select_item_extend_to(instance_id, group_name, new_widget) self.selection_changed.emit() - def _on_remove_selected(self): - selected_widget = self._get_selected_widget() - if selected_widget is None: - self._on_widget_selection(CONTEXT_ID, None) + def _select_item_clear(self, instance_id, group_name, new_widget): + """Select specific item by instance id and clear previous selection. + + Pass `CONTEXT_ID` as instance id and empty string as group to select + global context item. + """ + + selected_widgets = self._get_selected_widgets() + for widget in selected_widgets: + if widget.id != instance_id: + widget.set_selected(False) + + self._explicitly_selected_groups = [group_name] + self._explicitly_selected_instance_ids = [instance_id] + + if new_widget is not None: + new_widget.set_selected(True) + + def _select_item_extend(self, instance_id, group_name, new_widget): + """Add/Remove single item to/from current selection. + + If item is already selected the selection is removed. 
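+ + Example (illustrative): Ctrl+click on an unselected card adds it + to the selection, Ctrl+click on an already selected card removes + it from the selection.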
+ """ + + self._explicitly_selected_instance_ids = ( + self._get_selected_instance_ids() + ) + if new_widget.is_selected: + self._explicitly_selected_instance_ids.remove(instance_id) + new_widget.set_selected(False) + remove_group = False + if instance_id == CONTEXT_ID: + remove_group = True + else: + group_widget = self._widgets_by_group[group_name] + if not group_widget.get_selected_widgets(): + remove_group = True + + if remove_group: + self._explicitly_selected_groups.remove(group_name) + return + + self._explicitly_selected_instance_ids.append(instance_id) + if group_name in self._explicitly_selected_groups: + self._explicitly_selected_groups.remove(group_name) + self._explicitly_selected_groups.append(group_name) + new_widget.set_selected(True) + + def _select_item_extend_to(self, instance_id, group_name, new_widget): + """Extend selected items to specific instance id. + + This method is handling Shift+click selection of widgets. Selection + is not stored to explicit selection items. That's because user can + shift select again and it should use last explicit selected item as + source item for selection. + + Items selected via this function can get to explicit selection only if + selection is extended by one specific item ('_select_item_extend'). + From that moment the selection is locked to new last explicit selected + item. + + It's required to traverse through group widgets in their UI order and + through their instances in UI order. All explicitly selected items + must not change their selection state during this function. Passed + instance id can be above or under last selected item so a start item + and end item must be found to be able know which direction is selection + happening. + """ + + # Start group name (in '_ordered_groups') + start_group = None + # End group name (in '_ordered_groups') + end_group = None + # Instance id of first selected item + start_instance_id = None + # Instance id of last selected item + end_instance_id = None + + # Get previously selected group by explicit selected groups + previous_group = None + if self._explicitly_selected_groups: + previous_group = self._explicitly_selected_groups[-1] + + # Find last explicitly selected instance id + previous_last_selected_id = None + if self._explicitly_selected_instance_ids: + previous_last_selected_id = ( + self._explicitly_selected_instance_ids[-1] + ) + + # If last instance id was not found or available then last selected + # group is also invalid. + # NOTE: This probably never happen? 
+ if previous_last_selected_id is None: + previous_group = None + + # Check if previously selected group is available and find out if + # new instance group is above or under previous selection + # - based on this information the start/end group/instance are filled + if previous_group in self._ordered_groups: + new_idx = self._ordered_groups.index(group_name) + prev_idx = self._ordered_groups.index(previous_group) + if new_idx < prev_idx: + start_group = group_name + end_group = previous_group + start_instance_id = instance_id + end_instance_id = previous_last_selected_id + else: + start_group = previous_group + end_group = group_name + start_instance_id = previous_last_selected_id + end_instance_id = instance_id + + # If start group is not set then use context item group name + if start_group is None: + start_group = "" + + # If start instance id is not filled then use context id (similar to + # group) + if start_instance_id is None: + start_instance_id = CONTEXT_ID + + # If end group is not defined then use passed group name + # - this can happen when the previous group was not selected + # - when this happens the selection will probably happen from the + # context item to the item selected by the user + if end_group is None: + end_group = group_name + + # If end instance is not filled then use instance selected by user + if end_instance_id is None: + end_instance_id = instance_id + + # Start and end group are the same + # - a different logic is needed in that case + same_group = start_group == end_group + + # Process known information and change selection of items + passed_start_group = False + passed_end_group = False + # Go through ordered groups (from top to bottom) and change selection + for name in self._ordered_groups: + # Prepare sorted instance widgets + if name == "": + sorted_widgets = [self._context_widget] + else: + group_widget = self._widgets_by_group[name] + sorted_widgets = group_widget.get_ordered_widgets() + + # Change selection based on explicit selection if start group + # was not passed yet + if not passed_start_group: + if name != start_group: + for widget in sorted_widgets: + widget.set_selected( + widget.id in self._explicitly_selected_instance_ids + ) + continue + + # Change selection based on explicit selection if end group + # was already passed + if passed_end_group: + for widget in sorted_widgets: + widget.set_selected( + widget.id in self._explicitly_selected_instance_ids + ) + continue + + # Start group is already passed and end group was not yet hit + if same_group: + passed_start_group = True + passed_end_group = True + passed_start_instance = False + passed_end_instance = False + for widget in sorted_widgets: + if not passed_start_instance: + if widget.id in (start_instance_id, end_instance_id): + if widget.id != start_instance_id: + # Swap start/end instance if start instance is + # after end + # - fix 'passed_end_instance' check + start_instance_id, end_instance_id = ( + end_instance_id, start_instance_id + ) + passed_start_instance = True + + # Find out if widget should be selected + select = False + if passed_end_instance: + select = False + + elif passed_start_instance: + select = True + + # Select the widget anyway if it is in the explicitly + # selected items + if ( + not select + and widget.id in self._explicitly_selected_instance_ids + ): + select = True + + widget.set_selected(select) + + if ( + not passed_end_instance + and widget.id == end_instance_id + ): + passed_end_instance = True + + elif name == start_group: + # First group from which selection
should start + # - look for start instance first from which the selection + # should happen + passed_start_group = True + passed_start_instance = False + for widget in sorted_widgets: + if widget.id == start_instance_id: + passed_start_instance = True + + select = False + # Check if passed start instance or instance is + # in explicitly selected items to be selected + if ( + passed_start_instance + or widget.id in self._explicitly_selected_instance_ids + ): + select = True + widget.set_selected(select) + + elif name == end_group: + # Last group where selection should happen + # - look for end instance first after which the selection + # should stop + passed_end_group = True + passed_end_instance = False + for widget in sorted_widgets: + select = False + # Check if not yet passed end instance or if instance is + # in explicitly selected items to be selected + if ( + not passed_end_instance + or widget.id in self._explicitly_selected_instance_ids + ): + select = True + + widget.set_selected(select) + + if widget.id == end_instance_id: + passed_end_instance = True + + else: + # Just select everything between start and end group + for widget in sorted_widgets: + widget.set_selected(True) def get_selected_items(self): """Get selected instance ids and context.""" instances = [] - context_selected = False - selected_widget = self._get_selected_widget() - if selected_widget is self._context_widget: - context_selected = True + selected_widgets = self._get_selected_widgets() - elif selected_widget is not None: - instances.append(selected_widget.instance) + context_selected = False + for widget in selected_widgets: + if widget is self._context_widget: + context_selected = True + else: + instances.append(widget.id) return instances, context_selected + + def set_selected_items(self, instance_ids, context_selected): + s_instance_ids = set(instance_ids) + cur_ids, cur_context = self.get_selected_items() + if ( + set(cur_ids) == s_instance_ids + and cur_context == context_selected + ): + return + + selected_groups = [] + selected_instances = [] + if context_selected: + selected_groups.append("") + selected_instances.append(CONTEXT_ID) + + self._context_widget.set_selected(context_selected) + + for group_name in self._ordered_groups: + if group_name == "": + continue + + group_widget = self._widgets_by_group[group_name] + group_selected = False + for widget in group_widget.get_ordered_widgets(): + select = False + if widget.id in s_instance_ids: + selected_instances.append(widget.id) + group_selected = True + select = True + widget.set_selected(select) + + if group_selected: + selected_groups.append(group_name) + + self._explicitly_selected_groups = selected_groups + self._explicitly_selected_instance_ids = selected_instances diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_widget.py similarity index 52% rename from openpype/tools/publisher/widgets/create_dialog.py rename to openpype/tools/publisher/widgets/create_widget.py index d4740b2493..10cf39675e 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -1,30 +1,22 @@ import sys import re import traceback -import copy -import qtawesome -try: - import commonmark -except Exception: - commonmark = None from Qt import QtWidgets, QtCore, QtGui -from openpype.client import get_asset_by_name, get_subsets -from openpype.lib import TaskNotSetError from openpype.pipeline.create import ( CreatorError, - SUBSET_NAME_ALLOWED_SYMBOLS -) -from 
openpype.tools.utils import ( - ErrorMessageBox, - MessageOverlayObject, - ClickableFrame, + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, ) +from openpype.tools.utils import ErrorMessageBox -from .widgets import IconValuePixmapLabel -from .assets_widget import CreateDialogAssetsWidget -from .tasks_widget import CreateDialogTasksWidget +from .widgets import ( + IconValuePixmapLabel, + CreateBtn, +) +from .assets_widget import CreateWidgetAssetsWidget +from .tasks_widget import CreateWidgetTasksWidget from .precreate_widget import PreCreateWidget from ..constants import ( VARIANT_TOOLTIP, @@ -118,8 +110,6 @@ class CreateErrorMessageBox(ErrorMessageBox): # TODO add creator identifier/label to details class CreatorShortDescWidget(QtWidgets.QWidget): - height_changed = QtCore.Signal(int) - def __init__(self, parent=None): super(CreatorShortDescWidget, self).__init__(parent=parent) @@ -158,155 +148,31 @@ class CreatorShortDescWidget(QtWidgets.QWidget): self._family_label = family_label self._description_label = description_label - self._last_height = None - - def _check_height_change(self): - height = self.height() - if height != self._last_height: - self._last_height = height - self.height_changed.emit(height) - - def showEvent(self, event): - super(CreatorShortDescWidget, self).showEvent(event) - self._check_height_change() - - def resizeEvent(self, event): - super(CreatorShortDescWidget, self).resizeEvent(event) - self._check_height_change() - - def set_plugin(self, plugin=None): - if not plugin: + def set_creator_item(self, creator_item=None): + if not creator_item: self._icon_widget.set_icon_def(None) self._family_label.setText("") self._description_label.setText("") return - plugin_icon = plugin.get_icon() - description = plugin.get_description() or "" + plugin_icon = creator_item.icon + description = creator_item.description or "" self._icon_widget.set_icon_def(plugin_icon) - self._family_label.setText("{}".format(plugin.family)) + self._family_label.setText("{}".format(creator_item.family)) self._family_label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction) self._description_label.setText(description) -class HelpButton(ClickableFrame): - resized = QtCore.Signal(int) - question_mark_icon_name = "fa.question" - help_icon_name = "fa.question-circle" - hide_icon_name = "fa.angle-left" - - def __init__(self, *args, **kwargs): - super(HelpButton, self).__init__(*args, **kwargs) - self.setObjectName("CreateDialogHelpButton") - - question_mark_label = QtWidgets.QLabel(self) - help_widget = QtWidgets.QWidget(self) - - help_question = QtWidgets.QLabel(help_widget) - help_label = QtWidgets.QLabel("Help", help_widget) - hide_icon = QtWidgets.QLabel(help_widget) - - help_layout = QtWidgets.QHBoxLayout(help_widget) - help_layout.setContentsMargins(0, 0, 5, 0) - help_layout.addWidget(help_question, 0) - help_layout.addWidget(help_label, 0) - help_layout.addStretch(1) - help_layout.addWidget(hide_icon, 0) - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.setSpacing(0) - layout.addWidget(question_mark_label, 0) - layout.addWidget(help_widget, 1) - - help_widget.setVisible(False) - - self._question_mark_label = question_mark_label - self._help_widget = help_widget - self._help_question = help_question - self._hide_icon = hide_icon - - self._expanded = None - self.set_expanded() - - def set_expanded(self, expanded=None): - if self._expanded is expanded: - if expanded is not None: - return - expanded = False - self._expanded = expanded - 
self._help_widget.setVisible(expanded) - self._update_content() - - def _update_content(self): - width = self.get_icon_width() - if self._expanded: - question_mark_pix = QtGui.QPixmap(width, width) - question_mark_pix.fill(QtCore.Qt.transparent) - - else: - question_mark_icon = qtawesome.icon( - self.question_mark_icon_name, color=QtCore.Qt.white - ) - question_mark_pix = question_mark_icon.pixmap(width, width) - - hide_icon = qtawesome.icon( - self.hide_icon_name, color=QtCore.Qt.white - ) - help_question_icon = qtawesome.icon( - self.help_icon_name, color=QtCore.Qt.white - ) - self._question_mark_label.setPixmap(question_mark_pix) - self._question_mark_label.setMaximumWidth(width) - self._hide_icon.setPixmap(hide_icon.pixmap(width, width)) - self._help_question.setPixmap(help_question_icon.pixmap(width, width)) - - def get_icon_width(self): - metrics = self.fontMetrics() - return metrics.height() - - def set_pos_and_size(self, pos_x, pos_y, width, height): - update_icon = self.height() != height - self.move(pos_x, pos_y) - self.resize(width, height) - - if update_icon: - self._update_content() - self.updateGeometry() - - def showEvent(self, event): - super(HelpButton, self).showEvent(event) - self.resized.emit(self.height()) - - def resizeEvent(self, event): - super(HelpButton, self).resizeEvent(event) - self.resized.emit(self.height()) - - -class CreateDialog(QtWidgets.QDialog): - default_size = (1000, 560) - - def __init__( - self, controller, asset_name=None, task_name=None, parent=None - ): - super(CreateDialog, self).__init__(parent) +class CreateWidget(QtWidgets.QWidget): + def __init__(self, controller, parent=None): + super(CreateWidget, self).__init__(parent) self.setWindowTitle("Create new instance") - self.controller = controller + self._controller = controller - if asset_name is None: - asset_name = self.dbcon.Session.get("AVALON_ASSET") - - if task_name is None: - task_name = self.dbcon.Session.get("AVALON_TASK") - - self._asset_name = asset_name - self._task_name = task_name - - self._last_pos = None - self._asset_doc = None + self._asset_name = None self._subset_names = None self._selected_creator = None @@ -318,12 +184,12 @@ class CreateDialog(QtWidgets.QDialog): self._name_pattern = name_pattern self._compiled_name_pattern = re.compile(name_pattern) - overlay_object = MessageOverlayObject(self) + main_splitter_widget = QtWidgets.QSplitter(self) - context_widget = QtWidgets.QWidget(self) + context_widget = QtWidgets.QWidget(main_splitter_widget) - assets_widget = CreateDialogAssetsWidget(controller, context_widget) - tasks_widget = CreateDialogTasksWidget(controller, context_widget) + assets_widget = CreateWidgetAssetsWidget(controller, context_widget) + tasks_widget = CreateWidgetTasksWidget(controller, context_widget) context_layout = QtWidgets.QVBoxLayout(context_widget) context_layout.setContentsMargins(0, 0, 0, 0) @@ -332,21 +198,44 @@ class CreateDialog(QtWidgets.QDialog): context_layout.addWidget(tasks_widget, 1) # --- Creators view --- - creators_header_widget = QtWidgets.QWidget(self) - header_label_widget = QtWidgets.QLabel( - "Choose family:", creators_header_widget - ) - creators_header_layout = QtWidgets.QHBoxLayout(creators_header_widget) - creators_header_layout.setContentsMargins(0, 0, 0, 0) - creators_header_layout.addWidget(header_label_widget, 1) + creators_widget = QtWidgets.QWidget(main_splitter_widget) - creators_view = QtWidgets.QListView(self) + creator_short_desc_widget = CreatorShortDescWidget(creators_widget) + + attr_separator_widget = 
QtWidgets.QWidget(creators_widget) + attr_separator_widget.setObjectName("Separator") + attr_separator_widget.setMinimumHeight(1) + attr_separator_widget.setMaximumHeight(1) + + creators_splitter = QtWidgets.QSplitter(creators_widget) + + creators_view_widget = QtWidgets.QWidget(creators_splitter) + + creator_view_label = QtWidgets.QLabel( + "Choose publish type", creators_view_widget + ) + + creators_view = QtWidgets.QListView(creators_view_widget) creators_model = QtGui.QStandardItemModel() creators_sort_model = QtCore.QSortFilterProxyModel() creators_sort_model.setSourceModel(creators_model) creators_view.setModel(creators_sort_model) - variant_widget = VariantInputsWidget(self) + creators_view_layout = QtWidgets.QVBoxLayout(creators_view_widget) + creators_view_layout.setContentsMargins(0, 0, 0, 0) + creators_view_layout.addWidget(creator_view_label, 0) + creators_view_layout.addWidget(creators_view, 1) + + # --- Creator attr defs --- + creators_attrs_widget = QtWidgets.QWidget(creators_splitter) + + variant_subset_label = QtWidgets.QLabel( + "Create options", creators_attrs_widget + ) + + variant_subset_widget = QtWidgets.QWidget(creators_attrs_widget) + # Variant and subset input + variant_widget = VariantInputsWidget(creators_attrs_widget) variant_input = QtWidgets.QLineEdit(variant_widget) variant_input.setObjectName("VariantInput") @@ -365,39 +254,20 @@ class CreateDialog(QtWidgets.QDialog): variant_layout.addWidget(variant_input, 1) variant_layout.addWidget(variant_hints_btn, 0, QtCore.Qt.AlignVCenter) - subset_name_input = QtWidgets.QLineEdit(self) + subset_name_input = QtWidgets.QLineEdit(variant_subset_widget) subset_name_input.setEnabled(False) - form_layout = QtWidgets.QFormLayout() - form_layout.addRow("Variant:", variant_widget) - form_layout.addRow("Subset:", subset_name_input) - - mid_widget = QtWidgets.QWidget(self) - mid_layout = QtWidgets.QVBoxLayout(mid_widget) - mid_layout.setContentsMargins(0, 0, 0, 0) - mid_layout.addWidget(creators_header_widget, 0) - mid_layout.addWidget(creators_view, 1) - mid_layout.addLayout(form_layout, 0) - # ------------ - - # --- Creator short info and attr defs --- - creator_attrs_widget = QtWidgets.QWidget(self) - - creator_short_desc_widget = CreatorShortDescWidget( - creator_attrs_widget - ) - - attr_separator_widget = QtWidgets.QWidget(self) - attr_separator_widget.setObjectName("Separator") - attr_separator_widget.setMinimumHeight(1) - attr_separator_widget.setMaximumHeight(1) + variant_subset_layout = QtWidgets.QFormLayout(variant_subset_widget) + variant_subset_layout.setContentsMargins(0, 0, 0, 0) + variant_subset_layout.addRow("Variant", variant_widget) + variant_subset_layout.addRow("Subset", subset_name_input) # Precreate attributes widget - pre_create_widget = PreCreateWidget(creator_attrs_widget) + pre_create_widget = PreCreateWidget(creators_attrs_widget) # Create button - create_btn_wrapper = QtWidgets.QWidget(creator_attrs_widget) - create_btn = QtWidgets.QPushButton("Create", create_btn_wrapper) + create_btn_wrapper = QtWidgets.QWidget(creators_attrs_widget) + create_btn = CreateBtn(create_btn_wrapper) create_btn.setEnabled(False) create_btn_wrap_layout = QtWidgets.QHBoxLayout(create_btn_wrapper) @@ -405,79 +275,45 @@ class CreateDialog(QtWidgets.QDialog): create_btn_wrap_layout.addStretch(1) create_btn_wrap_layout.addWidget(create_btn, 0) - creator_attrs_layout = QtWidgets.QVBoxLayout(creator_attrs_widget) - creator_attrs_layout.setContentsMargins(0, 0, 0, 0) - creator_attrs_layout.addWidget(creator_short_desc_widget, 
0) - creator_attrs_layout.addWidget(attr_separator_widget, 0) - creator_attrs_layout.addWidget(pre_create_widget, 1) - creator_attrs_layout.addWidget(create_btn_wrapper, 0) - # ------------------------------------- + creators_attrs_layout = QtWidgets.QVBoxLayout(creators_attrs_widget) + creators_attrs_layout.setContentsMargins(0, 0, 0, 0) + creators_attrs_layout.addWidget(variant_subset_label, 0) + creators_attrs_layout.addWidget(variant_subset_widget, 0) + creators_attrs_layout.addWidget(pre_create_widget, 1) + creators_attrs_layout.addWidget(create_btn_wrapper, 0) + + creators_splitter.addWidget(creators_view_widget) + creators_splitter.addWidget(creators_attrs_widget) + creators_splitter.setStretchFactor(0, 1) + creators_splitter.setStretchFactor(1, 2) + + creators_layout = QtWidgets.QVBoxLayout(creators_widget) + creators_layout.setContentsMargins(0, 0, 0, 0) + creators_layout.addWidget(creator_short_desc_widget, 0) + creators_layout.addWidget(attr_separator_widget, 0) + creators_layout.addWidget(creators_splitter, 1) + # ------------ # --- Detailed information about creator --- # Detailed description of creator - detail_description_widget = QtWidgets.QWidget(self) - - detail_placoholder_widget = QtWidgets.QWidget( - detail_description_widget - ) - detail_placoholder_widget.setAttribute( - QtCore.Qt.WA_TranslucentBackground - ) - - detail_description_input = QtWidgets.QTextEdit( - detail_description_widget - ) - detail_description_input.setObjectName("CreatorDetailedDescription") - detail_description_input.setTextInteractionFlags( - QtCore.Qt.TextBrowserInteraction - ) - - detail_description_layout = QtWidgets.QVBoxLayout( - detail_description_widget - ) - detail_description_layout.setContentsMargins(0, 0, 0, 0) - detail_description_layout.setSpacing(0) - detail_description_layout.addWidget(detail_placoholder_widget, 0) - detail_description_layout.addWidget(detail_description_input, 1) - - detail_description_widget.setVisible(False) + # TODO there is currently no way to show this # ------------------------------------------- - splitter_widget = QtWidgets.QSplitter(self) - splitter_widget.addWidget(context_widget) - splitter_widget.addWidget(mid_widget) - splitter_widget.addWidget(creator_attrs_widget) - splitter_widget.addWidget(detail_description_widget) - splitter_widget.setStretchFactor(0, 1) - splitter_widget.setStretchFactor(1, 1) - splitter_widget.setStretchFactor(2, 1) - splitter_widget.setStretchFactor(3, 1) + main_splitter_widget.addWidget(context_widget) + main_splitter_widget.addWidget(creators_widget) + main_splitter_widget.setStretchFactor(0, 1) + main_splitter_widget.setStretchFactor(1, 3) - layout = QtWidgets.QHBoxLayout(self) - layout.addWidget(splitter_widget, 1) - - # Floating help button - # - Create this button as last to be fully visible - help_btn = HelpButton(self) + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(main_splitter_widget, 1) prereq_timer = QtCore.QTimer() prereq_timer.setInterval(50) prereq_timer.setSingleShot(True) - desc_width_anim_timer = QtCore.QTimer() - desc_width_anim_timer.setInterval(10) - prereq_timer.timeout.connect(self._invalidate_prereq) - desc_width_anim_timer.timeout.connect(self._on_desc_animation) - - help_btn.clicked.connect(self._on_help_btn) - help_btn.resized.connect(self._on_help_btn_resize) - - assets_widget.header_height_changed.connect( - self._on_asset_filter_height_change - ) - create_btn.clicked.connect(self._on_create)
variant_widget.resized.connect(self._on_variant_widget_resize) variant_input.returnPressed.connect(self._on_create) @@ -492,16 +328,14 @@ class CreateDialog(QtWidgets.QDialog): self._on_current_session_context_request ) tasks_widget.task_changed.connect(self._on_task_change) - creator_short_desc_widget.height_changed.connect( - self._on_description_height_change + + controller.event_system.add_callback( + "plugins.refresh.finished", self._on_plugins_refresh ) - splitter_widget.splitterMoved.connect(self._on_splitter_move) - controller.add_plugins_refresh_callback(self._on_plugins_refresh) + self._main_splitter_widget = main_splitter_widget - self._overlay_object = overlay_object - - self._splitter_widget = splitter_widget + self._creators_splitter = creators_splitter self._context_widget = context_widget self._assets_widget = assets_widget @@ -514,7 +348,6 @@ class CreateDialog(QtWidgets.QDialog): self.variant_hints_menu = variant_hints_menu self.variant_hints_group = variant_hints_group - self._creators_header_widget = creators_header_widget self._creators_model = creators_model self._creators_sort_model = creators_sort_model self._creators_view = creators_view @@ -524,26 +357,16 @@ class CreateDialog(QtWidgets.QDialog): self._pre_create_widget = pre_create_widget self._attr_separator_widget = attr_separator_widget - self._detail_placoholder_widget = detail_placoholder_widget - self._detail_description_widget = detail_description_widget - self._detail_description_input = detail_description_input - self._help_btn = help_btn - self._prereq_timer = prereq_timer self._first_show = True - # Description animation - self._description_size_policy = detail_description_widget.sizePolicy() - self._desc_width_anim_timer = desc_width_anim_timer - self._desc_widget_step = 0 - self._last_description_width = None - self._last_full_width = 0 - self._expected_description_width = 0 - self._last_desc_max_width = None - self._other_widgets_widths = [] + @property + def current_asset_name(self): + return self._controller.current_asset_name - def _emit_message(self, message): - self._overlay_object.add_message(message) + @property + def current_task_name(self): + return self._controller.current_task_name def _context_change_is_enabled(self): return self._context_widget.isEnabled() @@ -554,8 +377,8 @@ class CreateDialog(QtWidgets.QDialog): asset_name = self._assets_widget.get_selected_asset_name() if asset_name is None: - asset_name = self._asset_name - return asset_name + asset_name = self.current_asset_name + return asset_name or None def _get_task_name(self): task_name = None @@ -566,13 +389,9 @@ class CreateDialog(QtWidgets.QDialog): task_name = self._tasks_widget.get_selected_task_name() if not task_name: - task_name = self._task_name + task_name = self.current_task_name return task_name - @property - def dbcon(self): - return self.controller.dbcon - def _set_context_enabled(self, enabled): self._assets_widget.set_enabled(enabled) self._tasks_widget.set_enabled(enabled) @@ -601,7 +420,7 @@ class CreateDialog(QtWidgets.QDialog): # data self._refresh_creators() - self._assets_widget.set_current_asset_name(self._asset_name) + self._assets_widget.update_current_asset() self._assets_widget.select_asset_by_name(asset_name) self._tasks_widget.set_asset_name(asset_name) self._tasks_widget.select_task_name(task_name) @@ -611,10 +430,6 @@ class CreateDialog(QtWidgets.QDialog): def _invalidate_prereq_deffered(self): self._prereq_timer.start() - def _on_asset_filter_height_change(self, height): - 
self._creators_header_widget.setMinimumHeight(height) - self._creators_header_widget.setMaximumHeight(height) - def _invalidate_prereq(self): prereq_available = True creator_btn_tooltips = [] @@ -627,7 +442,7 @@ class CreateDialog(QtWidgets.QDialog): prereq_available = False creator_btn_tooltips.append("Creator is not selected") - if self._context_change_is_enabled() and self._asset_doc is None: + if self._context_change_is_enabled() and self._asset_name is None: # QUESTION how to handle invalid asset? prereq_available = False creator_btn_tooltips.append("Context is not selected") @@ -651,30 +466,19 @@ class CreateDialog(QtWidgets.QDialog): asset_name = self._get_asset_name() # Skip if asset did not change - if self._asset_doc and self._asset_doc["name"] == asset_name: + if self._asset_name and self._asset_name == asset_name: return - # Make sure `_asset_doc` and `_subset_names` variables are reset - self._asset_doc = None + # Make sure `_asset_name` and `_subset_names` variables are reset + self._asset_name = asset_name self._subset_names = None if asset_name is None: return - project_name = self.dbcon.active_project() - asset_doc = get_asset_by_name(project_name, asset_name) - self._asset_doc = asset_doc + subset_names = self._controller.get_existing_subset_names(asset_name) - if asset_doc: - asset_id = asset_doc["_id"] - subset_docs = get_subsets( - project_name, asset_ids=[asset_id], fields=["name"] - ) - self._subset_names = { - subset_doc["name"] - for subset_doc in subset_docs - } - - if not asset_doc: + self._subset_names = subset_names + if subset_names is None: self.subset_name_input.setText("< Asset is not set >") def _refresh_creators(self): @@ -689,7 +493,10 @@ class CreateDialog(QtWidgets.QDialog): # Add new families new_creators = set() - for identifier, creator in self.controller.manual_creators.items(): + for identifier, creator_item in self._controller.creator_items.items(): + if creator_item.creator_type != "artist": + continue + # TODO add details about creator new_creators.add(identifier) if identifier in existing_items: @@ -701,10 +508,9 @@ class CreateDialog(QtWidgets.QDialog): ) self._creators_model.appendRow(item) - label = creator.label or identifier - item.setData(label, QtCore.Qt.DisplayRole) + item.setData(creator_item.label, QtCore.Qt.DisplayRole) item.setData(identifier, CREATOR_IDENTIFIER_ROLE) - item.setData(creator.family, FAMILY_ROLE) + item.setData(creator_item.family, FAMILY_ROLE) # Remove families that are no more available for identifier in (old_creators - new_creators): @@ -729,8 +535,7 @@ class CreateDialog(QtWidgets.QDialog): def _on_plugins_refresh(self): # Trigger refresh only if is visible - if self.isVisible(): - self.refresh() + self.refresh() def _on_asset_change(self): self._refresh_asset() @@ -746,14 +551,9 @@ class CreateDialog(QtWidgets.QDialog): def _on_current_session_context_request(self): self._assets_widget.set_current_session_asset() - if self._task_name: - self._tasks_widget.select_task_name(self._task_name) - - def _on_description_height_change(self): - # Use separator's 'y' position as height - height = self._attr_separator_widget.y() - self._detail_placoholder_widget.setMinimumHeight(height) - self._detail_placoholder_widget.setMaximumHeight(height) + task_name = self.current_task_name + if task_name: + self._tasks_widget.select_task_name(task_name) def _on_creator_item_change(self, new_index, _old_index): identifier = None @@ -761,221 +561,53 @@ class CreateDialog(QtWidgets.QDialog): identifier = 
new_index.data(CREATOR_IDENTIFIER_ROLE) self._set_creator_by_identifier(identifier) - def _update_help_btn(self): - short_desc_rect = self._creator_short_desc_widget.rect() - - # point = short_desc_rect.topRight() - point = short_desc_rect.center() - mapped_point = self._creator_short_desc_widget.mapTo(self, point) - # pos_y = mapped_point.y() - center_pos_y = mapped_point.y() - icon_width = self._help_btn.get_icon_width() - - _height = int(icon_width * 2.5) - height = min(_height, short_desc_rect.height()) - pos_y = center_pos_y - int(height / 2) - - pos_x = self.width() - icon_width - if self._detail_placoholder_widget.isVisible(): - pos_x -= ( - self._detail_placoholder_widget.width() - + self._splitter_widget.handle(3).width() - ) - - width = self.width() - pos_x - - self._help_btn.set_pos_and_size( - max(0, pos_x), max(0, pos_y), - width, height + def _set_creator_detailed_text(self, creator_item): + # TODO implement + description = "" + if creator_item is not None: + description = creator_item.detailed_description or description + self._controller.event_system.emit( + "show.detailed.help", + { + "message": description + }, + "create.widget" ) - def _on_help_btn_resize(self, height): - if self._creator_short_desc_widget.height() != height: - self._update_help_btn() - - def _on_splitter_move(self, *args): - self._update_help_btn() - - def _on_help_btn(self): - if self._desc_width_anim_timer.isActive(): - return - - final_size = self.size() - cur_sizes = self._splitter_widget.sizes() - - if self._desc_widget_step == 0: - now_visible = self._detail_description_widget.isVisible() - else: - now_visible = self._desc_widget_step > 0 - - sizes = [] - for idx, value in enumerate(cur_sizes): - if idx < 3: - sizes.append(value) - - self._last_full_width = final_size.width() - self._other_widgets_widths = list(sizes) - - if now_visible: - cur_desc_width = self._detail_description_widget.width() - if cur_desc_width < 1: - cur_desc_width = 2 - step_size = int(cur_desc_width / 5) - if step_size < 1: - step_size = 1 - - step_size *= -1 - expected_width = 0 - desc_width = cur_desc_width - 1 - width = final_size.width() - 1 - min_max = desc_width - self._last_description_width = cur_desc_width - - else: - self._detail_description_widget.setVisible(True) - handle = self._splitter_widget.handle(3) - desc_width = handle.sizeHint().width() - if self._last_description_width: - expected_width = self._last_description_width - else: - hint = self._detail_description_widget.sizeHint() - expected_width = hint.width() - - width = final_size.width() + desc_width - step_size = int(expected_width / 5) - if step_size < 1: - step_size = 1 - min_max = 0 - - if self._last_desc_max_width is None: - self._last_desc_max_width = ( - self._detail_description_widget.maximumWidth() - ) - self._detail_description_widget.setMinimumWidth(min_max) - self._detail_description_widget.setMaximumWidth(min_max) - self._expected_description_width = expected_width - self._desc_widget_step = step_size - - self._desc_width_anim_timer.start() - - sizes.append(desc_width) - - final_size.setWidth(width) - - self._splitter_widget.setSizes(sizes) - self.resize(final_size) - - self._help_btn.set_expanded(not now_visible) - - def _on_desc_animation(self): - current_width = self._detail_description_widget.width() - - desc_width = None - last_step = False - growing = self._desc_widget_step > 0 - - # Growing - if growing: - if current_width < self._expected_description_width: - desc_width = current_width + self._desc_widget_step - if desc_width >= 
self._expected_description_width: - desc_width = self._expected_description_width - last_step = True - - # Decreasing - elif self._desc_widget_step < 0: - if current_width > self._expected_description_width: - desc_width = current_width + self._desc_widget_step - if desc_width <= self._expected_description_width: - desc_width = self._expected_description_width - last_step = True - - if desc_width is None: - self._desc_widget_step = 0 - self._desc_width_anim_timer.stop() - return - - if last_step and not growing: - self._detail_description_widget.setVisible(False) - QtWidgets.QApplication.processEvents() - - width = self._last_full_width - handle_width = self._splitter_widget.handle(3).width() - if growing: - width += (handle_width + desc_width) - else: - width -= self._last_description_width - if last_step: - width -= handle_width - else: - width += desc_width - - if not last_step or growing: - self._detail_description_widget.setMaximumWidth(desc_width) - self._detail_description_widget.setMinimumWidth(desc_width) - - window_size = self.size() - window_size.setWidth(width) - self.resize(window_size) - if not last_step: - return - - self._desc_widget_step = 0 - self._desc_width_anim_timer.stop() - - if not growing: - return - - self._detail_description_widget.setMinimumWidth(0) - self._detail_description_widget.setMaximumWidth( - self._last_desc_max_width - ) - self._detail_description_widget.setSizePolicy( - self._description_size_policy - ) - - sizes = list(self._other_widgets_widths) - sizes.append(desc_width) - self._splitter_widget.setSizes(sizes) - - def _set_creator_detailed_text(self, creator): - if not creator: - self._detail_description_input.setPlainText("") - return - detailed_description = creator.get_detail_description() or "" - if commonmark: - html = commonmark.commonmark(detailed_description) - self._detail_description_input.setHtml(html) - else: - self._detail_description_input.setMarkdown(detailed_description) - def _set_creator_by_identifier(self, identifier): - creator = self.controller.manual_creators.get(identifier) - self._set_creator(creator) + creator_item = self._controller.creator_items.get(identifier) + self._set_creator(creator_item) - def _set_creator(self, creator): - self._creator_short_desc_widget.set_plugin(creator) - self._set_creator_detailed_text(creator) - self._pre_create_widget.set_plugin(creator) + def _set_creator(self, creator_item): + """Set current creator item. - self._selected_creator = creator + Args: + creator_item (CreatorItem): Item representing creator that can be + triggered by artist. 
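+ + If the creator item disallows context changes the asset and task + widgets get disabled ('create_allow_context_change').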
+ """ - if not creator: + self._creator_short_desc_widget.set_creator_item(creator_item) + self._set_creator_detailed_text(creator_item) + self._pre_create_widget.set_creator_item(creator_item) + + self._selected_creator = creator_item + + if not creator_item: self._set_context_enabled(False) return if ( - creator.create_allow_context_change + creator_item.create_allow_context_change != self._context_change_is_enabled() ): - self._set_context_enabled(creator.create_allow_context_change) + self._set_context_enabled(creator_item.create_allow_context_change) self._refresh_asset() - default_variants = creator.get_default_variants() + default_variants = creator_item.default_variants if not default_variants: default_variants = ["Main"] - default_variant = creator.get_default_variant() + default_variant = creator_item.default_variant if not default_variant: default_variant = default_variants[0] @@ -1034,14 +666,13 @@ class CreateDialog(QtWidgets.QDialog): self.subset_name_input.setText("< Valid variant >") return - project_name = self.controller.project_name + asset_name = self._get_asset_name() task_name = self._get_task_name() - - asset_doc = copy.deepcopy(self._asset_doc) + creator_idenfier = self._selected_creator.identifier # Calculate subset name with Creator plugin try: - subset_name = self._selected_creator.get_subset_name( - variant_value, task_name, asset_doc, project_name + subset_name = self._controller.get_subset_name( + creator_idenfier, variant_value, task_name, asset_name ) except TaskNotSetError: self._create_btn.setEnabled(False) @@ -1116,41 +747,19 @@ class CreateDialog(QtWidgets.QDialog): self.variant_input.style().polish(self.variant_input) def _on_first_show(self): - center = self.rect().center() - - width, height = self.default_size - self.resize(width, height) - part = int(width / 7) - self._splitter_widget.setSizes( - [part * 2, part * 2, width - (part * 4)] - ) - - new_pos = self.mapToGlobal(center) - new_pos.setX(new_pos.x() - int(self.width() / 2)) - new_pos.setY(new_pos.y() - int(self.height() / 2)) - self.move(new_pos) - - def moveEvent(self, event): - super(CreateDialog, self).moveEvent(event) - self._last_pos = self.pos() + width = self.width() + part = int(width / 4) + rem_width = width - part + self._main_splitter_widget.setSizes([part, rem_width]) + rem_width = rem_width - part + self._creators_splitter.setSizes([part, rem_width]) def showEvent(self, event): - super(CreateDialog, self).showEvent(event) + super(CreateWidget, self).showEvent(event) if self._first_show: self._first_show = False self._on_first_show() - if self._last_pos is not None: - self.move(self._last_pos) - - self._update_help_btn() - - self.refresh() - - def resizeEvent(self, event): - super(CreateDialog, self).resizeEvent(event) - self._update_help_btn() - def _on_create(self): indexes = self._creators_view.selectedIndexes() if not indexes or len(indexes) > 1: @@ -1186,7 +795,7 @@ class CreateDialog(QtWidgets.QDialog): error_msg = None formatted_traceback = None try: - self.controller.create( + self._controller.create( creator_identifier, subset_name, instance_data, @@ -1207,7 +816,7 @@ class CreateDialog(QtWidgets.QDialog): if error_msg is None: self._set_creator(self._selected_creator) - self._emit_message("Creation finished...") + self._controller.emit_card_message("Creation finished...") else: box = CreateErrorMessageBox( creator_label, diff --git a/openpype/tools/publisher/widgets/help_widget.py b/openpype/tools/publisher/widgets/help_widget.py new file mode 100644 index 
0000000000..0090111889 --- /dev/null +++ b/openpype/tools/publisher/widgets/help_widget.py @@ -0,0 +1,84 @@ +try: + import commonmark +except Exception: + commonmark = None + +from Qt import QtWidgets, QtCore + + +class HelpButton(QtWidgets.QPushButton): + """Button used to trigger help dialog.""" + + def __init__(self, parent): + super(HelpButton, self).__init__(parent) + self.setObjectName("CreateDialogHelpButton") + self.setText("?") + + +class HelpWidget(QtWidgets.QWidget): + """Widget showing help for single functionality.""" + + def __init__(self, parent): + super(HelpWidget, self).__init__(parent) + + # TODO add hints what to help with? + detail_description_input = QtWidgets.QTextEdit(self) + detail_description_input.setObjectName("CreatorDetailedDescription") + detail_description_input.setTextInteractionFlags( + QtCore.Qt.TextBrowserInteraction + ) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setSpacing(0) + main_layout.addWidget(detail_description_input, 1) + + self._detail_description_input = detail_description_input + + self.set_detailed_text() + + def set_detailed_text(self, text=None): + if not text: + text = "We didn't prepare help for this part..." + + if commonmark: + html = commonmark.commonmark(text) + self._detail_description_input.setHtml(html) + elif hasattr(self._detail_description_input, "setMarkdown"): + self._detail_description_input.setMarkdown(text) + else: + self._detail_description_input.setText(text) + + +class HelpDialog(QtWidgets.QDialog): + default_width = 530 + default_height = 340 + + def __init__(self, controller, parent): + super(HelpDialog, self).__init__(parent) + + self.setWindowTitle("Help dialog") + + help_content = HelpWidget(self) + + main_layout = QtWidgets.QHBoxLayout(self) + main_layout.addWidget(help_content, 1) + + controller.event_system.add_callback( + "show.detailed.help", self._on_help_request + ) + + self._controller = controller + + self._help_content = help_content + + def _on_help_request(self, event): + message = event.get("message") + self.set_detailed_text(message) + + def set_detailed_text(self, text=None): + self._help_content.set_detailed_text(text) + + def showEvent(self, event): + super(HelpDialog, self).showEvent(event) + self.resize(self.default_width, self.default_height) diff --git a/openpype/tools/publisher/widgets/images/copy.png b/openpype/tools/publisher/widgets/images/copy.png deleted file mode 100644 index 522afcdc87..0000000000 Binary files a/openpype/tools/publisher/widgets/images/copy.png and /dev/null differ diff --git a/openpype/tools/publisher/widgets/images/create.png b/openpype/tools/publisher/widgets/images/create.png new file mode 100644 index 0000000000..d691f364dd Binary files /dev/null and b/openpype/tools/publisher/widgets/images/create.png differ diff --git a/openpype/tools/publisher/widgets/images/download_arrow.png b/openpype/tools/publisher/widgets/images/download_arrow.png deleted file mode 100644 index a35a12fb39..0000000000 Binary files a/openpype/tools/publisher/widgets/images/download_arrow.png and /dev/null differ diff --git a/openpype/tools/publisher/widgets/images/validate.png b/openpype/tools/publisher/widgets/images/validate.png index d3cfa0b75d..c8472e9d31 100644 Binary files a/openpype/tools/publisher/widgets/images/validate.png and b/openpype/tools/publisher/widgets/images/validate.png differ diff --git a/openpype/tools/publisher/widgets/images/view_report.png b/openpype/tools/publisher/widgets/images/view_report.png index 
50e214c3f8..6f3efd5e19 100644 Binary files a/openpype/tools/publisher/widgets/images/view_report.png and b/openpype/tools/publisher/widgets/images/view_report.png differ diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 6e31ba635b..c329ca0e8c 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -54,8 +54,7 @@ class ListItemDelegate(QtWidgets.QStyledItemDelegate): def __init__(self, parent): super(ListItemDelegate, self).__init__(parent) - colors_data = get_objected_colors() - group_color_info = colors_data["publisher"]["list-view-group"] + group_color_info = get_objected_colors("publisher", "list-view-group") self._group_colors = { key: value.get_qcolor() @@ -410,7 +409,7 @@ class InstanceListView(AbstractInstanceView): def __init__(self, controller, parent): super(InstanceListView, self).__init__(parent) - self.controller = controller + self._controller = controller instance_view = InstanceTreeView(self) instance_delegate = ListItemDelegate(instance_view) @@ -521,7 +520,7 @@ class InstanceListView(AbstractInstanceView): # Prepare instances by their groups instances_by_group_name = collections.defaultdict(list) group_names = set() - for instance in self.controller.instances: + for instance in self._controller.instances.values(): group_label = instance.group_label group_names.add(group_label) instances_by_group_name[group_label].append(instance) @@ -724,13 +723,13 @@ class InstanceListView(AbstractInstanceView): widget.update_instance_values() def _on_active_changed(self, changed_instance_id, new_value): - selected_instances, _ = self.get_selected_items() + selected_instance_ids, _ = self.get_selected_items() selected_ids = set() found = False - for instance in selected_instances: - selected_ids.add(instance.id) - if not found and instance.id == changed_instance_id: + for instance_id in selected_instance_ids: + selected_ids.add(instance_id) + if not found and instance_id == changed_instance_id: found = True if not found: @@ -761,32 +760,6 @@ class InstanceListView(AbstractInstanceView): if changed_ids: self.active_changed.emit() - def get_selected_items(self): - """Get selected instance ids and context selection. - - Returns: - tuple: Selected instance ids and boolean if context - is selected. - """ - instances = [] - context_selected = False - instances_by_id = { - instance.id: instance - for instance in self.controller.instances - } - - for index in self._instance_view.selectionModel().selectedIndexes(): - instance_id = index.data(INSTANCE_ID_ROLE) - if not context_selected and instance_id == CONTEXT_ID: - context_selected = True - - elif instance_id is not None: - instance = instances_by_id.get(instance_id) - if instance: - instances.append(instance) - - return instances, context_selected - def _on_selection_change(self, *_args): self.selection_changed.emit() @@ -826,3 +799,102 @@ class InstanceListView(AbstractInstanceView): proxy_index = self._proxy_model.mapFromSource(group_item.index()) if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) + + def get_selected_items(self): + """Get selected instance ids and context selection. + + Returns: + tuple: Selected instance ids and boolean if context + is selected. 
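+ + Only instance ids are returned instead of instance objects; the + instances must be queried from the controller when needed.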
+ """ + instance_ids = [] + context_selected = False + + for index in self._instance_view.selectionModel().selectedIndexes(): + instance_id = index.data(INSTANCE_ID_ROLE) + if not context_selected and instance_id == CONTEXT_ID: + context_selected = True + + elif instance_id is not None: + instance_ids.append(instance_id) + + return instance_ids, context_selected + + def set_selected_items(self, instance_ids, context_selected): + s_instance_ids = set(instance_ids) + cur_ids, cur_context = self.get_selected_items() + if ( + set(cur_ids) == s_instance_ids + and cur_context == context_selected + ): + return + + view = self._instance_view + src_model = self._instance_model + proxy_model = self._proxy_model + + select_indexes = [] + + select_queue = collections.deque() + select_queue.append( + (src_model.invisibleRootItem(), []) + ) + while select_queue: + queue_item = select_queue.popleft() + item, parent_items = queue_item + + if item.hasChildren(): + new_parent_items = list(parent_items) + new_parent_items.append(item) + for row in range(item.rowCount()): + select_queue.append( + (item.child(row), list(new_parent_items)) + ) + + instance_id = item.data(INSTANCE_ID_ROLE) + if not instance_id: + continue + + if instance_id in s_instance_ids: + select_indexes.append(item.index()) + for parent_item in parent_items: + index = parent_item.index() + proxy_index = proxy_model.mapFromSource(index) + if not view.isExpanded(proxy_index): + view.expand(proxy_index) + + elif context_selected and instance_id == CONTEXT_ID: + select_indexes.append(item.index()) + + selection_model = view.selectionModel() + if not select_indexes: + selection_model.clear() + return + + if len(select_indexes) == 1: + proxy_index = proxy_model.mapFromSource(select_indexes[0]) + selection_model.setCurrentIndex( + proxy_index, + selection_model.ClearAndSelect | selection_model.Rows + ) + return + + first_index = proxy_model.mapFromSource(select_indexes.pop(0)) + last_index = proxy_model.mapFromSource(select_indexes.pop(-1)) + + selection_model.setCurrentIndex( + first_index, + selection_model.ClearAndSelect | selection_model.Rows + ) + + for index in select_indexes: + proxy_index = proxy_model.mapFromSource(index) + selection_model.select( + proxy_index, + selection_model.Select | selection_model.Rows + ) + + selection_model.setCurrentIndex( + last_index, + selection_model.Select | selection_model.Rows + ) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py new file mode 100644 index 0000000000..5bd3017c2a --- /dev/null +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -0,0 +1,380 @@ +from Qt import QtWidgets, QtCore + +from .border_label_widget import BorderedLabelWidget + +from .card_view_widgets import InstanceCardView +from .list_view_widgets import InstanceListView +from .widgets import ( + SubsetAttributesWidget, + CreateInstanceBtn, + RemoveInstanceBtn, + ChangeViewBtn, +) +from .create_widget import CreateWidget + + +class OverviewWidget(QtWidgets.QFrame): + active_changed = QtCore.Signal() + instance_context_changed = QtCore.Signal() + create_requested = QtCore.Signal() + + anim_end_value = 200 + anim_duration = 200 + + def __init__(self, controller, parent): + super(OverviewWidget, self).__init__(parent) + + self._refreshing_instances = False + self._controller = controller + + create_widget = CreateWidget(controller, self) + + # --- Created Subsets/Instances --- + # Common widget for creation and overview + subset_views_widget = 
BorderedLabelWidget( + "Subsets to publish", self + ) + + subset_view_cards = InstanceCardView(controller, subset_views_widget) + subset_list_view = InstanceListView(controller, subset_views_widget) + + subset_views_layout = QtWidgets.QStackedLayout() + subset_views_layout.addWidget(subset_view_cards) + subset_views_layout.addWidget(subset_list_view) + subset_views_layout.setCurrentWidget(subset_view_cards) + + # Buttons at the bottom of subset view + create_btn = CreateInstanceBtn(self) + delete_btn = RemoveInstanceBtn(self) + change_view_btn = ChangeViewBtn(self) + + # --- Overview --- + # Subset details widget + subset_attributes_wrap = BorderedLabelWidget( + "Publish options", self + ) + subset_attributes_widget = SubsetAttributesWidget( + controller, subset_attributes_wrap + ) + subset_attributes_wrap.set_center_widget(subset_attributes_widget) + + # Layout of buttons at the bottom of subset view + subset_view_btns_layout = QtWidgets.QHBoxLayout() + subset_view_btns_layout.setContentsMargins(0, 5, 0, 0) + subset_view_btns_layout.addWidget(create_btn) + subset_view_btns_layout.addSpacing(5) + subset_view_btns_layout.addWidget(delete_btn) + subset_view_btns_layout.addStretch(1) + subset_view_btns_layout.addWidget(change_view_btn) + + # Layout of view and buttons + # - widget 'subset_view_widget' is necessary + # - a layout alone won't be resized automatically to the minimum size + # hint on a child resize request! + subset_view_widget = QtWidgets.QWidget(subset_views_widget) + subset_view_layout = QtWidgets.QVBoxLayout(subset_view_widget) + subset_view_layout.setContentsMargins(0, 0, 0, 0) + subset_view_layout.addLayout(subset_views_layout, 1) + subset_view_layout.addLayout(subset_view_btns_layout, 0) + + subset_views_widget.set_center_widget(subset_view_widget) + + # Whole subset layout with attributes and details + subset_content_widget = QtWidgets.QWidget(self) + subset_content_layout = QtWidgets.QHBoxLayout(subset_content_widget) + subset_content_layout.setContentsMargins(0, 0, 0, 0) + subset_content_layout.addWidget(create_widget, 7) + subset_content_layout.addWidget(subset_views_widget, 3) + subset_content_layout.addWidget(subset_attributes_wrap, 7) + + # Subset frame layout + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.addWidget(subset_content_widget, 1) + + change_anim = QtCore.QVariantAnimation() + change_anim.setStartValue(0) + change_anim.setEndValue(self.anim_end_value) + change_anim.setDuration(self.anim_duration) + change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) + + # --- Callbacks for instances/subsets view --- + create_btn.clicked.connect(self._on_create_clicked) + delete_btn.clicked.connect(self._on_delete_clicked) + change_view_btn.clicked.connect(self._on_change_view_clicked) + + change_anim.valueChanged.connect(self._on_change_anim) + change_anim.finished.connect(self._on_change_anim_finished) + + # Selection changed + subset_list_view.selection_changed.connect( + self._on_subset_change + ) + subset_view_cards.selection_changed.connect( + self._on_subset_change + ) + # Active instances changed + subset_list_view.active_changed.connect( + self._on_active_changed + ) + subset_view_cards.active_changed.connect( + self._on_active_changed + ) + # Instance context has changed + subset_attributes_widget.instance_context_changed.connect( + self._on_instance_context_change + ) + + # --- Controller callbacks --- + controller.event_system.add_callback( + "publish.process.started", self._on_publish_start + ) + 
controller.event_system.add_callback( + "publish.reset.finished", self._on_publish_reset + ) + controller.event_system.add_callback( + "instances.refresh.finished", self._on_instances_refresh + ) + + self._subset_content_widget = subset_content_widget + self._subset_content_layout = subset_content_layout + + self._subset_view_cards = subset_view_cards + self._subset_list_view = subset_list_view + self._subset_views_layout = subset_views_layout + + self._delete_btn = delete_btn + + self._subset_attributes_widget = subset_attributes_widget + self._create_widget = create_widget + self._subset_views_widget = subset_views_widget + self._subset_attributes_wrap = subset_attributes_wrap + + self._change_anim = change_anim + + # Start in create mode + self._create_widget_policy = create_widget.sizePolicy() + self._subset_views_widget_policy = subset_views_widget.sizePolicy() + self._subset_attributes_wrap_policy = ( + subset_attributes_wrap.sizePolicy() + ) + self._max_widget_width = None + self._current_state = "create" + subset_attributes_wrap.setVisible(False) + + def set_state(self, new_state, animate): + if new_state == self._current_state: + return + + self._current_state = new_state + + anim_is_running = ( + self._change_anim.state() == self._change_anim.Running + ) + if not animate: + self._change_visibility_for_state() + if anim_is_running: + self._change_anim.stop() + return + + if self._max_widget_width is None: + self._max_widget_width = self._subset_views_widget.maximumWidth() + + if new_state == "create": + direction = self._change_anim.Backward + else: + direction = self._change_anim.Forward + self._change_anim.setDirection(direction) + + if not anim_is_running: + view_width = self._subset_views_widget.width() + self._subset_views_widget.setMinimumWidth(view_width) + self._subset_views_widget.setMaximumWidth(view_width) + self._change_anim.start() + + def _on_create_clicked(self): + """Pass the signal to the parent widget, which should care about + changing the state. + + We don't change anything here; the parent takes care of it. + """ + + self.create_requested.emit() + + def _on_delete_clicked(self): + instance_ids, _ = self.get_selected_items() + + # Ask the user if they really want to remove the instances + dialog = QtWidgets.QMessageBox(self) + dialog.setIcon(QtWidgets.QMessageBox.Question) + dialog.setWindowTitle("Are you sure?") + if len(instance_ids) > 1: + msg = ( + "Do you really want to remove {} instances?" + ).format(len(instance_ids)) + else: + msg = ( + "Do you really want to remove the instance?" 
+ ) + dialog.setText(msg) + dialog.setStandardButtons( + QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel + ) + dialog.setDefaultButton(QtWidgets.QMessageBox.Ok) + dialog.setEscapeButton(QtWidgets.QMessageBox.Cancel) + dialog.exec_() + # Skip if OK was not clicked + if dialog.result() == QtWidgets.QMessageBox.Ok: + instance_ids = set(instance_ids) + self._controller.remove_instances(instance_ids) + + def _on_change_view_clicked(self): + self._change_view_type() + + def _on_subset_change(self, *_args): + # Ignore changes if in middle of refreshing + if self._refreshing_instances: + return + + instance_ids, context_selected = self.get_selected_items() + + # Disable delete button if nothing is selected + self._delete_btn.setEnabled(len(instance_ids) > 0) + + instances_by_id = self._controller.instances + instances = [ + instances_by_id[instance_id] + for instance_id in instance_ids + ] + self._subset_attributes_widget.set_current_instances( + instances, context_selected + ) + + def _on_active_changed(self): + if self._refreshing_instances: + return + self.active_changed.emit() + + def _on_change_anim(self, value): + self._create_widget.setVisible(True) + self._subset_attributes_wrap.setVisible(True) + width = ( + self._subset_content_widget.width() + - ( + self._subset_views_widget.width() + + (self._subset_content_layout.spacing() * 2) + ) + ) + subset_attrs_width = int(float(width) / self.anim_end_value) * value + if subset_attrs_width > width: + subset_attrs_width = width + create_width = width - subset_attrs_width + + self._create_widget.setMinimumWidth(create_width) + self._create_widget.setMaximumWidth(create_width) + self._subset_attributes_wrap.setMinimumWidth(subset_attrs_width) + self._subset_attributes_wrap.setMaximumWidth(subset_attrs_width) + + def _on_change_anim_finished(self): + self._change_visibility_for_state() + self._create_widget.setMinimumWidth(0) + self._create_widget.setMaximumWidth(self._max_widget_width) + self._subset_attributes_wrap.setMinimumWidth(0) + self._subset_attributes_wrap.setMaximumWidth(self._max_widget_width) + self._subset_views_widget.setMinimumWidth(0) + self._subset_views_widget.setMaximumWidth(self._max_widget_width) + self._create_widget.setSizePolicy( + self._create_widget_policy + ) + self._subset_attributes_wrap.setSizePolicy( + self._subset_attributes_wrap_policy + ) + self._subset_views_widget.setSizePolicy( + self._subset_views_widget_policy + ) + + def _change_visibility_for_state(self): + self._create_widget.setVisible( + self._current_state == "create" + ) + self._subset_attributes_wrap.setVisible( + self._current_state == "publish" + ) + + def _on_instance_context_change(self): + current_idx = self._subset_views_layout.currentIndex() + for idx in range(self._subset_views_layout.count()): + if idx == current_idx: + continue + widget = self._subset_views_layout.widget(idx) + if widget.refreshed: + widget.set_refreshed(False) + + current_widget = self._subset_views_layout.widget(current_idx) + current_widget.refresh_instance_states() + + self.instance_context_changed.emit() + + def get_selected_items(self): + view = self._subset_views_layout.currentWidget() + return view.get_selected_items() + + def _change_view_type(self): + idx = self._subset_views_layout.currentIndex() + new_idx = (idx + 1) % self._subset_views_layout.count() + + old_view = self._subset_views_layout.currentWidget() + new_view = self._subset_views_layout.widget(new_idx) + + if not new_view.refreshed: + new_view.refresh() + new_view.set_refreshed(True) + else: + 
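# View was already refreshed, only sync the instance states +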
+
+    def _change_view_type(self):
+        idx = self._subset_views_layout.currentIndex()
+        new_idx = (idx + 1) % self._subset_views_layout.count()
+
+        old_view = self._subset_views_layout.currentWidget()
+        new_view = self._subset_views_layout.widget(new_idx)
+
+        if not new_view.refreshed:
+            new_view.refresh()
+            new_view.set_refreshed(True)
+        else:
+            new_view.refresh_instance_states()
+
+        instance_ids, context_selected = old_view.get_selected_items()
+        new_view.set_selected_items(instance_ids, context_selected)
+
+        self._subset_views_layout.setCurrentIndex(new_idx)
+
+        self._on_subset_change()
+
+    def _refresh_instances(self):
+        if self._refreshing_instances:
+            return
+
+        self._refreshing_instances = True
+
+        for idx in range(self._subset_views_layout.count()):
+            widget = self._subset_views_layout.widget(idx)
+            widget.set_refreshed(False)
+
+        view = self._subset_views_layout.currentWidget()
+        view.refresh()
+        view.set_refreshed(True)
+
+        self._refreshing_instances = False
+
+        # Force instance change and refresh of details
+        self._on_subset_change()
+
+    def _on_publish_start(self):
+        """Publish started."""
+
+        self._subset_attributes_wrap.setEnabled(False)
+
+    def _on_publish_reset(self):
+        """Context in controller has been refreshed."""
+
+        self._subset_attributes_wrap.setEnabled(True)
+        self._subset_content_widget.setEnabled(self._controller.host_is_valid)
+
+    def _on_instances_refresh(self):
+        """Controller refreshed instances."""
+
+        self._refresh_instances()
+
+        # Give Qt a chance to process the resize request
+        QtWidgets.QApplication.processEvents()
+        # Trigger geometry update of the current view widget
+        widget = self._subset_views_layout.currentWidget()
+        widget.updateGeometry()
diff --git a/openpype/tools/publisher/widgets/precreate_widget.py b/openpype/tools/publisher/widgets/precreate_widget.py
index eaadfe890b..ef34c9bcb5 100644
--- a/openpype/tools/publisher/widgets/precreate_widget.py
+++ b/openpype/tools/publisher/widgets/precreate_widget.py
@@ -58,12 +58,12 @@ class PreCreateWidget(QtWidgets.QWidget):
     def current_value(self):
         return self._attributes_widget.current_value()
 
-    def set_plugin(self, creator):
+    def set_creator_item(self, creator_item):
         attr_defs = []
         creator_selected = False
-        if creator is not None:
+        if creator_item is not None:
             creator_selected = True
-            attr_defs = creator.get_pre_create_attr_defs()
+            attr_defs = creator_item.pre_create_attributes_defs
 
         self._attributes_widget.set_attr_defs(attr_defs)
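The rename from `set_plugin` to `set_creator_item` suggests the widget now consumes a UI-side data object exposing `pre_create_attributes_defs` as an attribute instead of calling `get_pre_create_attr_defs()` on the plugin itself. A rough sketch of what such a wrapper could look like; the `CreatorItem` name and its fields are assumptions, not the project's actual definition:

class CreatorItem(object):
    """Hypothetical UI-side snapshot of a creator plugin.

    Decouples widgets from plugin objects: the data is collected once
    and the UI never calls back into the plugin directly.
    """

    def __init__(self, identifier, label, pre_create_attributes_defs):
        self.identifier = identifier
        self.label = label
        self.pre_create_attributes_defs = pre_create_attributes_defs

    @classmethod
    def from_creator(cls, creator):
        # Collected once when the controller refreshes its plugins
        return cls(
            creator.identifier,
            creator.label,
            creator.get_pre_create_attr_defs(),
        )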
diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py
new file mode 100644
index 0000000000..e6333a104f
--- /dev/null
+++ b/openpype/tools/publisher/widgets/publish_frame.py
@@ -0,0 +1,524 @@
+import os
+import json
+import time
+
+from Qt import QtWidgets, QtCore
+
+from .widgets import (
+    StopBtn,
+    ResetBtn,
+    ValidateBtn,
+    PublishBtn,
+    PublishReportBtn,
+)
+
+
+class PublishFrame(QtWidgets.QWidget):
+    """Frame shown during publishing.
+
+    Shows all information related to publishing. Contains a validation error
+    widget which is shown only if a validation error happens during
+    validation.
+
+    The processing layer is the default layer. The validation error layer is
+    shown only if a validation exception is raised during publishing. The
+    report layer is available only after the publishing process has stopped
+    and must be triggered manually to change into that layer.
+
+    +------------------------------------------------------------------+
+    |                          < Main label >                          |
+    |                          < Label top >                           |
+    |        (####                10%  )                               |
+    |                                                                  |
+    |                                                                  |
+    +------------------------------------------------------------------+
+    """
+
+    details_page_requested = QtCore.Signal()
+
+    def __init__(self, controller, borders, parent):
+        super(PublishFrame, self).__init__(parent)
+
+        # Bottom part of widget where process and callback buttons are shown
+        # - QFrame is used to be able to set background using stylesheets
+        #   easily without overriding the style of all children widgets
+        content_frame = QtWidgets.QFrame(self)
+        content_frame.setObjectName("PublishInfoFrame")
+
+        top_content_widget = QtWidgets.QWidget(content_frame)
+
+        # Center widget displaying current state (without any specific info)
+        main_label = QtWidgets.QLabel(top_content_widget)
+        main_label.setObjectName("PublishInfoMainLabel")
+        main_label.setAlignment(QtCore.Qt.AlignCenter)
+
+        # Supporting labels for main label
+        # Top label is displayed just under main label
+        message_label_top = QtWidgets.QLabel(top_content_widget)
+        message_label_top.setAlignment(QtCore.Qt.AlignCenter)
+
+        # Label showing currently processed instance
+        progress_widget = QtWidgets.QWidget(top_content_widget)
+        instance_plugin_widget = QtWidgets.QWidget(progress_widget)
+        instance_label = QtWidgets.QLabel(
+            "", instance_plugin_widget
+        )
+        instance_label.setAlignment(
+            QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter
+        )
+        # Label showing currently processed plugin
+        plugin_label = QtWidgets.QLabel(
+            "", instance_plugin_widget
+        )
+        plugin_label.setAlignment(
+            QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter
+        )
+        instance_plugin_layout = QtWidgets.QHBoxLayout(instance_plugin_widget)
+        instance_plugin_layout.setContentsMargins(0, 0, 0, 0)
+        instance_plugin_layout.addWidget(instance_label, 1)
+        instance_plugin_layout.addWidget(plugin_label, 1)
+
+        # Progress bar showing progress of publishing
+        progress_bar = QtWidgets.QProgressBar(progress_widget)
+        progress_bar.setObjectName("PublishProgressBar")
+
+        progress_layout = QtWidgets.QVBoxLayout(progress_widget)
+        progress_layout.setSpacing(5)
+        progress_layout.setContentsMargins(0, 0, 0, 0)
+        progress_layout.addWidget(instance_plugin_widget, 0)
+        progress_layout.addWidget(progress_bar, 0)
+
+        top_content_layout = QtWidgets.QVBoxLayout(top_content_widget)
+        top_content_layout.setContentsMargins(0, 0, 0, 0)
+        top_content_layout.setSpacing(5)
+        top_content_layout.setAlignment(QtCore.Qt.AlignCenter)
+        top_content_layout.addWidget(main_label)
+        # TODO stretches should probably be replaced by spacing...
+        # - stretch in floating frame doesn't make sense
+        top_content_layout.addWidget(message_label_top)
+        top_content_layout.addWidget(progress_widget)
+
+        # Publishing buttons to stop, reset or trigger publishing
+        footer_widget = QtWidgets.QWidget(content_frame)
+
+        report_btn = PublishReportBtn(footer_widget)
+
+        shrunk_main_label = QtWidgets.QLabel(footer_widget)
+        shrunk_main_label.setObjectName("PublishInfoMainLabel")
+        shrunk_main_label.setAlignment(
+            QtCore.Qt.AlignVCenter | QtCore.Qt.AlignLeft
+        )
+
+        reset_btn = ResetBtn(footer_widget)
+        stop_btn = StopBtn(footer_widget)
+        validate_btn = ValidateBtn(footer_widget)
+        publish_btn = PublishBtn(footer_widget)
+
+        report_btn.add_action("Go to details", "go_to_report")
+        report_btn.add_action("Copy report", "copy_report")
+        report_btn.add_action("Export report", "export_report")
+
+        # Footer on info frame layout
+        footer_layout = QtWidgets.QHBoxLayout(footer_widget)
+        footer_layout.setContentsMargins(0, 0, 0, 0)
+        footer_layout.addWidget(report_btn, 0)
+        footer_layout.addWidget(shrunk_main_label, 1)
+        footer_layout.addWidget(reset_btn, 0)
+        footer_layout.addWidget(stop_btn, 0)
+        footer_layout.addWidget(validate_btn, 0)
+        footer_layout.addWidget(publish_btn, 0)
+
+        # Info frame content
+        content_layout = QtWidgets.QVBoxLayout(content_frame)
+        content_layout.setSpacing(5)
+
+        content_layout.addWidget(top_content_widget)
+        content_layout.addWidget(footer_widget)
+
+        main_layout = QtWidgets.QVBoxLayout(self)
+        main_layout.setContentsMargins(borders, 0, borders, borders)
+        main_layout.addWidget(content_frame)
+
+        shrunk_anim = QtCore.QVariantAnimation()
+        shrunk_anim.setDuration(140)
+        shrunk_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad)
+
+        # Force translucent background for widgets
+        for widget in (
+            self,
+            top_content_widget,
+            footer_widget,
+            progress_widget,
+            instance_plugin_widget,
+        ):
+            widget.setAttribute(QtCore.Qt.WA_TranslucentBackground)
+
+        report_btn.triggered.connect(self._on_report_triggered)
+        reset_btn.clicked.connect(self._on_reset_clicked)
+        stop_btn.clicked.connect(self._on_stop_clicked)
+        validate_btn.clicked.connect(self._on_validate_clicked)
+        publish_btn.clicked.connect(self._on_publish_clicked)
+
+        shrunk_anim.valueChanged.connect(self._on_shrunk_anim)
+        shrunk_anim.finished.connect(self._on_shrunk_anim_finish)
+
+        controller.event_system.add_callback(
+            "publish.reset.finished", self._on_publish_reset
+        )
+        controller.event_system.add_callback(
+            "publish.process.started", self._on_publish_start
+        )
+        controller.event_system.add_callback(
+            "publish.has_validated.changed", self._on_publish_validated_change
+        )
+        controller.event_system.add_callback(
+            "publish.process.stopped", self._on_publish_stop
+        )
+
+        controller.event_system.add_callback(
+            "publish.process.instance.changed", self._on_instance_change
+        )
+        controller.event_system.add_callback(
+            "publish.process.plugin.changed", self._on_plugin_change
+        )
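The frame never polls the controller; it reacts only to the named events registered above. A minimal sketch of what an `event_system` with this `add_callback` surface could look like; the dispatch details (event objects, signature handling) are assumptions:

class EventSystem(object):
    """Minimal topic-based callback registry (illustrative only)."""

    def __init__(self):
        self._callbacks = {}

    def add_callback(self, topic, callback):
        self._callbacks.setdefault(topic, []).append(callback)

    def emit(self, topic, data=None):
        for callback in self._callbacks.get(topic, []):
            # Some handlers above take no arguments ('_on_publish_reset')
            # while others receive event data ('_on_instance_change'), so
            # a real implementation would inspect the callback signature
            if data is None:
                callback()
            else:
                callback(data)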
+
+        self._shrunk_anim = shrunk_anim
+
+        self._controller = controller
+
+        self._content_frame = content_frame
+        self._content_layout = content_layout
+        self._top_content_layout = top_content_layout
+        self._top_content_widget = top_content_widget
+
+        self._main_label = main_label
+        self._message_label_top = message_label_top
+
+        self._instance_label = instance_label
+        self._plugin_label = plugin_label
+
+        self._progress_bar = progress_bar
+        self._progress_widget = progress_widget
+
+        self._shrunk_main_label = shrunk_main_label
+        self._reset_btn = reset_btn
+        self._stop_btn = stop_btn
+        self._validate_btn = validate_btn
+        self._publish_btn = publish_btn
+
+        self._shrunken = False
+        self._top_widget_max_height = None
+        self._top_widget_size_policy = top_content_widget.sizePolicy()
+        self._last_instance_label = None
+        self._last_plugin_label = None
+
+    def mouseReleaseEvent(self, event):
+        super(PublishFrame, self).mouseReleaseEvent(event)
+        self._change_shrunk_state()
+
+    def _change_shrunk_state(self):
+        self.set_shrunk_state(not self._shrunken)
+
+    def set_shrunk_state(self, shrunk):
+        if shrunk is self._shrunken:
+            return
+
+        if self._top_widget_max_height is None:
+            self._top_widget_max_height = (
+                self._top_content_widget.maximumHeight()
+            )
+
+        self._shrunken = shrunk
+
+        anim_is_running = (
+            self._shrunk_anim.state() == self._shrunk_anim.Running
+        )
+        if not self.isVisible():
+            if anim_is_running:
+                self._shrunk_anim.stop()
+            self._on_shrunk_anim_finish()
+            return
+
+        start = 0
+        end = 0
+        if shrunk:
+            start = self._top_content_widget.height()
+        else:
+            if anim_is_running:
+                start = self._shrunk_anim.currentValue()
+            hint = self._top_content_widget.minimumSizeHint()
+            end = hint.height()
+
+        self._shrunk_anim.setStartValue(start)
+        self._shrunk_anim.setEndValue(end)
+        if not anim_is_running:
+            self._shrunk_anim.start()
+
+    def _on_shrunk_anim(self, value):
+        diff = self._top_content_widget.height() - value
+        if not self._top_content_widget.isVisible():
+            diff -= self._content_layout.spacing()
+
+        window_pos = self.pos()
+        window_pos_y = window_pos.y() + diff
+        window_height = self.height() - diff
+
+        self._top_content_widget.setMinimumHeight(value)
+        self._top_content_widget.setMaximumHeight(value)
+        self._top_content_widget.setVisible(True)
+
+        self.resize(self.width(), window_height)
+        self.move(window_pos.x(), window_pos_y)
+
+    def _on_shrunk_anim_finish(self):
+        self._top_content_widget.setVisible(not self._shrunken)
+        self._top_content_widget.setMinimumHeight(0)
+        self._top_content_widget.setMaximumHeight(
+            self._top_widget_max_height
+        )
+        self._top_content_widget.setSizePolicy(self._top_widget_size_policy)
+
+        if self._shrunken:
+            self._shrunk_main_label.setText(self._main_label.text())
+        else:
+            self._shrunk_main_label.setText("")
+
+        if self._shrunken:
+            content_frame_hint = self._content_frame.sizeHint()
+
+            layout = self.layout()
+            margins = layout.contentsMargins()
+            window_height = (
+                content_frame_hint.height()
+                + margins.bottom()
+                + margins.top()
+            )
+            diff = self.height() - window_height
+            window_pos = self.pos()
+            window_pos_y = window_pos.y() + diff
+            self.resize(self.width(), window_height)
+            self.move(window_pos.x(), window_pos_y)
+
+    def _set_main_label(self, message):
+        self._main_label.setText(message)
+        if self._shrunken:
+            self._shrunk_main_label.setText(message)
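The shrink handlers above resize the window and move it down by exactly the height it loses, so the bottom edge stays visually anchored while the top content collapses. The trick in isolation (helper name is illustrative, assuming a top-level or freely movable widget):

from Qt import QtWidgets

def collapse_top(widget, collapsed_height):
    """Shrink 'widget' while keeping its bottom edge where it was."""
    diff = widget.height() - collapsed_height
    pos = widget.pos()
    widget.resize(widget.width(), collapsed_height)
    # Move down by the lost height so the bottom edge stays anchored
    widget.move(pos.x(), pos.y() + diff)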
+
+    def _on_publish_reset(self):
+        self._last_instance_label = None
+        self._last_plugin_label = None
+
+        self._set_success_property()
+        self._set_progress_visibility(True)
+
+        self._main_label.setText("Hit publish (play button)! If you want")
+        self._message_label_top.setText("")
+
+        self._reset_btn.setEnabled(True)
+        self._stop_btn.setEnabled(False)
+        self._validate_btn.setEnabled(True)
+        self._publish_btn.setEnabled(True)
+
+        self._progress_bar.setValue(self._controller.publish_progress)
+        self._progress_bar.setMaximum(self._controller.publish_max_progress)
+
+    def _on_publish_start(self):
+        if self._last_plugin_label:
+            self._plugin_label.setText(self._last_plugin_label)
+
+        if self._last_instance_label:
+            self._instance_label.setText(self._last_instance_label)
+
+        self._set_success_property(3)
+        self._set_progress_visibility(True)
+        self._set_main_label("Publishing...")
+
+        self._reset_btn.setEnabled(False)
+        self._stop_btn.setEnabled(True)
+        self._validate_btn.setEnabled(False)
+        self._publish_btn.setEnabled(False)
+
+        self.set_shrunk_state(False)
+
+    def _on_publish_validated_change(self, event):
+        if event["value"]:
+            self._validate_btn.setEnabled(False)
+
+    def _on_instance_change(self, event):
+        """Change instance label when an instance is going to be processed."""
+
+        self._last_instance_label = event["instance_label"]
+        self._instance_label.setText(event["instance_label"])
+        QtWidgets.QApplication.processEvents()
+
+    def _on_plugin_change(self, event):
+        """Change plugin label when a plugin is going to be processed."""
+
+        self._last_plugin_label = event["plugin_label"]
+        self._progress_bar.setValue(self._controller.publish_progress)
+        self._plugin_label.setText(event["plugin_label"])
+        QtWidgets.QApplication.processEvents()
+
+    def _on_publish_stop(self):
+        self._progress_bar.setValue(self._controller.publish_progress)
+
+        self._reset_btn.setEnabled(True)
+        self._stop_btn.setEnabled(False)
+
+        self._instance_label.setText("")
+        self._plugin_label.setText("")
+
+        validate_enabled = not self._controller.publish_has_crashed
+        publish_enabled = not self._controller.publish_has_crashed
+        if validate_enabled:
+            validate_enabled = not self._controller.publish_has_validated
+        if publish_enabled:
+            if (
+                self._controller.publish_has_validated
+                and self._controller.publish_has_validation_errors
+            ):
+                publish_enabled = False
+
+            else:
+                publish_enabled = not self._controller.publish_has_finished
+
+        self._validate_btn.setEnabled(validate_enabled)
+        self._publish_btn.setEnabled(publish_enabled)
+
+        if self._controller.publish_has_crashed:
+            self._set_error_msg()
+
+        elif self._controller.publish_has_validation_errors:
+            self._set_progress_visibility(False)
+            self._set_validation_errors()
+
+        elif self._controller.publish_has_finished:
+            self._set_finished()
+
+        else:
+            self._set_stopped()
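Because publishing runs on the Qt main thread, the instance and plugin handlers call `processEvents()` after each label update; without it the UI would repaint only after the whole publish finishes. A sketch of the difference, not the tool's actual loop:

from Qt import QtWidgets

def run_plugins(progress_bar, plugins):
    """Run blocking work on the UI thread while keeping it responsive."""
    for index, plugin in enumerate(plugins):
        plugin.process()  # Blocking work
        progress_bar.setValue(index + 1)
        # Without this the repaint happens only after the loop ends
        QtWidgets.QApplication.processEvents()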
+
+    def _set_stopped(self):
+        main_label = "Publish paused"
+        if self._controller.publish_has_validated:
+            main_label += " - Validation passed"
+
+        self._set_main_label(main_label)
+        self._message_label_top.setText(
+            "Hit publish (play button) to continue."
+        )
+
+        self._set_success_property(4)
+
+    def _set_error_msg(self):
+        """Show an error message to the artist on publish crash."""
+
+        self._set_main_label("Error happened")
+
+        self._message_label_top.setText(self._controller.publish_error_msg)
+
+        self._set_success_property(1)
+
+    def _set_validation_errors(self):
+        self._set_main_label("Your publish didn't pass studio validations")
+        self._message_label_top.setText("Please check the results above")
+        self._set_success_property(2)
+
+    def _set_finished(self):
+        self._set_main_label("Finished")
+        self._message_label_top.setText("")
+        self._set_success_property(0)
+
+    def _set_progress_visibility(self, visible):
+        window_height = self.height()
+        self._progress_widget.setVisible(visible)
+        # Ignore rescaling and moving of the widget if it is shrunken or
+        #   the progress bar should be visible
+        if self._shrunken or visible:
+            return
+
+        height = self._progress_widget.height()
+        diff = height + self._top_content_layout.spacing()
+
+        window_pos = self.pos()
+        window_pos_y = self.pos().y() + diff
+        window_height -= diff
+
+        self.resize(self.width(), window_height)
+        self.move(window_pos.x(), window_pos_y)
+
+    def _set_success_property(self, state=None):
+        """Apply styles by state.
+
+        State enum:
+        - None - Default state after restart
+        - 0 - Success finish
+        - 1 - Error happened
+        - 2 - Validation error
+        - 3 - In progress
+        - 4 - Stopped/Paused
+        """
+
+        if state is None:
+            state = ""
+        else:
+            state = str(state)
+
+        for widget in (self._progress_bar, self._content_frame):
+            if widget.property("state") != state:
+                widget.setProperty("state", state)
+                widget.style().polish(widget)
+
+    def _copy_report(self):
+        logs = self._controller.get_publish_report()
+        logs_string = json.dumps(logs, indent=4)
+
+        mime_data = QtCore.QMimeData()
+        mime_data.setText(logs_string)
+        QtWidgets.QApplication.instance().clipboard().setMimeData(
+            mime_data
+        )
+
+    def _export_report(self):
+        default_filename = "publish-report-{}".format(
+            time.strftime("%y%m%d-%H-%M")
+        )
+        default_filepath = os.path.join(
+            os.path.expanduser("~"),
+            default_filename
+        )
+        new_filepath, ext = QtWidgets.QFileDialog.getSaveFileName(
+            self, "Save report", default_filepath, ".json"
+        )
+        if not ext or not new_filepath:
+            return
+
+        logs = self._controller.get_publish_report()
+        full_path = new_filepath + ext
+        dir_path = os.path.dirname(full_path)
+        if not os.path.exists(dir_path):
+            os.makedirs(dir_path)
+
+        with open(full_path, "w") as file_stream:
+            json.dump(logs, file_stream)
+
+    def _on_report_triggered(self, identifier):
+        if identifier == "export_report":
+            self._export_report()
+
+        elif identifier == "copy_report":
+            self._copy_report()
+
+        elif identifier == "go_to_report":
+            self.details_page_requested.emit()
+
+    def _on_reset_clicked(self):
+        self._controller.reset()
+
+    def _on_stop_clicked(self):
+        self._controller.stop_publish()
+
+    def _on_validate_clicked(self):
+        self._controller.validate()
+
+    def _on_publish_clicked(self):
+        self._controller.publish()
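`_set_success_property` styles by dynamic property: the stylesheet selects on the `state` property, and `polish()` forces the style to be re-evaluated after the property changes. A sketch of matching QSS rules; the selectors and colors here are assumptions, the project's real stylesheet lives elsewhere:

from Qt import QtWidgets

# Hypothetical stylesheet keyed on the object name and "state" values
# used above; the actual project QSS is defined elsewhere
STYLE = """
QFrame#PublishInfoFrame[state="0"] { background: #33AA55; }  /* success */
QFrame#PublishInfoFrame[state="1"] { background: #AA3333; }  /* error */
QFrame#PublishInfoFrame[state="2"] { background: #AA7733; }  /* validation */
"""

def apply_state(widget, state):
    # Changing a dynamic property does not re-apply QSS by itself;
    # unpolish/polish forces the style to be recomputed
    widget.setProperty("state", str(state))
    widget.style().unpolish(widget)
    widget.style().polish(widget)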
diff --git a/openpype/tools/publisher/widgets/publish_widget.py b/openpype/tools/publisher/widgets/publish_widget.py
deleted file mode 100644
index 80d0265dd3..0000000000
--- a/openpype/tools/publisher/widgets/publish_widget.py
+++ /dev/null
@@ -1,521 +0,0 @@
-import os
-import json
-import time
-
-from Qt import QtWidgets, QtCore, QtGui
-
-from openpype.pipeline import KnownPublishError
-
-from .validations_widget import ValidationsWidget
-from ..publish_report_viewer import PublishReportViewerWidget
-from .widgets import (
-    StopBtn,
-    ResetBtn,
-    ValidateBtn,
-    PublishBtn,
-    CopyPublishReportBtn,
-    SavePublishReportBtn,
-    ShowPublishReportBtn
-)
-
-
-class ActionsButton(QtWidgets.QToolButton):
-    def __init__(self, parent=None):
-        super(ActionsButton, self).__init__(parent)
-
-        self.setText("< No action >")
-        self.setPopupMode(self.MenuButtonPopup)
-        menu = QtWidgets.QMenu(self)
-
-        self.setMenu(menu)
-
-        self._menu = menu
-        self._actions = []
-        self._current_action = None
-
-        self.clicked.connect(self._on_click)
-
-    def current_action(self):
-        return self._current_action
-
-    def add_action(self, action):
-        self._actions.append(action)
-        action.triggered.connect(self._on_action_trigger)
-        self._menu.addAction(action)
-        if self._current_action is None:
-            self._set_action(action)
-
-    def set_action(self, action):
-        if action not in self._actions:
-            self.add_action(action)
-        self._set_action(action)
-
-    def _set_action(self, action):
-        if action is self._current_action:
-            return
-        self._current_action = action
-        self.setText(action.text())
-        self.setIcon(action.icon())
-
-    def _on_click(self):
-        self._current_action.trigger()
-
-    def _on_action_trigger(self):
-        action = self.sender()
-        if action not in self._actions:
-            return
-
-        self._set_action(action)
-
-
-class PublishFrame(QtWidgets.QFrame):
-    """Frame showed during publishing.
-
-    Shows all information related to publishing. Contains validation error
-    widget which is showed if only validation error happens during validation.
-
-    Processing layer is default layer. Validation error layer is shown if only
-    validation exception is raised during publishing. Report layer is
-    available only when publishing process is stopped and must be manually
-    triggered to change into that layer.
-
-    +------------------------------------------------------------------+
-    |                                                                  |
-    |                                                                  |
-    |                                                                  |
-    |                   < Validation error widget >                    |
-    |                                                                  |
-    |                                                                  |
-    |                                                                  |
-    |                                                                  |
-    +------------------------------------------------------------------+
-    |                          < Main label >                          |
-    |                          < Label top >                           |
-    |        (####                10%  )                               |
-    |                                                                  |
-    | Report: